Internal change
PiperOrigin-RevId: 570745425
parent 3b99f8d9dd
commit d2baba6dbb
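The diff below is a mechanical rename of the status-assignment macro from ASSIGN_OR_RETURN to MP_ASSIGN_OR_RETURN at every call site, plus the macro's own documentation and implementation in status_macros.h. As a minimal sketch of what a call site looks like before and after this change (the ComputeValue() helper and Example() function are hypothetical names used only for illustration; the include paths are the usual Abseil/MediaPipe headers and are assumed here):

#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "mediapipe/framework/port/status_macros.h"  // status macros (path assumed)

// Hypothetical helper that may fail.
absl::StatusOr<int> ComputeValue() { return 42; }

absl::Status Example() {
  // Before this commit a call site read:
  //   ASSIGN_OR_RETURN(int value, ComputeValue());
  // After this commit the same call site uses the MP_-prefixed name:
  MP_ASSIGN_OR_RETURN(int value, ComputeValue());
  // On success `value` holds the result; on failure the non-OK status is
  // returned from Example() immediately.
  (void)value;
  return absl::OkStatus();
}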
@@ -92,7 +92,7 @@ class BypassCalculator : public Node {
     auto options = cc->Options<BypassCalculatorOptions>();
     RET_CHECK_EQ(options.pass_input_stream().size(),
                  options.pass_output_stream().size());
-    ASSIGN_OR_RETURN(
+    MP_ASSIGN_OR_RETURN(
         auto pass_streams,
         GetPassMap(options, *cc->Inputs().TagMap(), *cc->Outputs().TagMap()));
     std::set<CollectionItemId> pass_out;
@@ -121,8 +121,9 @@ class BypassCalculator : public Node {
   // Saves the map of passthrough input and output stream ids.
   absl::Status Open(CalculatorContext* cc) override {
     auto options = cc->Options<BypassCalculatorOptions>();
-    ASSIGN_OR_RETURN(pass_streams_, GetPassMap(options, *cc->Inputs().TagMap(),
-                                               *cc->Outputs().TagMap()));
+    MP_ASSIGN_OR_RETURN(
+        pass_streams_,
+        GetPassMap(options, *cc->Inputs().TagMap(), *cc->Outputs().TagMap()));
     return absl::OkStatus();
   }
 
@@ -223,7 +223,7 @@ class GlTextureWarpAffineRunner
           absl::StrCat(mediapipe::kMediaPipeFragmentShaderPreamble,
                        interpolation_def, kFragShader);
 
-      ASSIGN_OR_RETURN(program_, create_fn(vert_src, frag_src));
+      MP_ASSIGN_OR_RETURN(program_, create_fn(vert_src, frag_src));
 
       auto create_custom_zero_fn = [&]() -> absl::StatusOr<Program> {
         std::string custom_zero_border_mode_def = R"(
@@ -236,10 +236,10 @@ class GlTextureWarpAffineRunner
       };
 #if GL_CLAMP_TO_BORDER_MAY_BE_SUPPORTED
       if (!IsGlClampToBorderSupported(gl_helper_->GetGlContext())) {
-        ASSIGN_OR_RETURN(program_custom_zero_, create_custom_zero_fn());
+        MP_ASSIGN_OR_RETURN(program_custom_zero_, create_custom_zero_fn());
       }
 #else
-      ASSIGN_OR_RETURN(program_custom_zero_, create_custom_zero_fn());
+      MP_ASSIGN_OR_RETURN(program_custom_zero_, create_custom_zero_fn());
 #endif  // GL_CLAMP_TO_BORDER_MAY_BE_SUPPORTED
 
       glGenFramebuffers(1, &framebuffer_);
@@ -92,11 +92,11 @@ absl::StatusOr<ImageFileProperties> GetImageFileProperites(
   properties.set_focal_length_mm(result.FocalLength);
   properties.set_focal_length_35mm(result.FocalLengthIn35mm);
 
-  ASSIGN_OR_RETURN(auto focal_length_pixels,
-                   ComputeFocalLengthInPixels(properties.image_width(),
-                                              properties.image_height(),
-                                              properties.focal_length_35mm(),
-                                              properties.focal_length_mm()));
+  MP_ASSIGN_OR_RETURN(auto focal_length_pixels,
+                      ComputeFocalLengthInPixels(properties.image_width(),
+                                                 properties.image_height(),
+                                                 properties.focal_length_35mm(),
+                                                 properties.focal_length_mm()));
   properties.set_focal_length_pixels(focal_length_pixels);
 
   return properties;
@@ -151,7 +151,7 @@ class ImageFilePropertiesCalculator : public CalculatorBase {
     if (cc->InputSidePackets().NumEntries() == 1) {
       const std::string& image_bytes =
           cc->InputSidePackets().Index(0).Get<std::string>();
-      ASSIGN_OR_RETURN(properties_, GetImageFileProperites(image_bytes));
+      MP_ASSIGN_OR_RETURN(properties_, GetImageFileProperites(image_bytes));
       read_properties_ = true;
     }
 
@@ -169,7 +169,7 @@ class ImageFilePropertiesCalculator : public CalculatorBase {
         return absl::OkStatus();
       }
       const std::string& image_bytes = cc->Inputs().Index(0).Get<std::string>();
-      ASSIGN_OR_RETURN(properties_, GetImageFileProperites(image_bytes));
+      MP_ASSIGN_OR_RETURN(properties_, GetImageFileProperites(image_bytes));
       read_properties_ = true;
     }
     if (read_properties_) {
@@ -79,8 +79,8 @@ class WarpAffineRunnerHolder<ImageFrame> {
   }
   absl::StatusOr<RunnerType*> GetRunner() {
     if (!runner_) {
-      ASSIGN_OR_RETURN(runner_,
-                       CreateAffineTransformationOpenCvRunner(interpolation_));
+      MP_ASSIGN_OR_RETURN(
+          runner_, CreateAffineTransformationOpenCvRunner(interpolation_));
     }
     return runner_.get();
   }
@@ -108,8 +108,9 @@ class WarpAffineRunnerHolder<mediapipe::GpuBuffer> {
   }
   absl::StatusOr<RunnerType*> GetRunner() {
     if (!runner_) {
-      ASSIGN_OR_RETURN(runner_, CreateAffineTransformationGlRunner(
-                                    gl_helper_, gpu_origin_, interpolation_));
+      MP_ASSIGN_OR_RETURN(
+          runner_, CreateAffineTransformationGlRunner(gl_helper_, gpu_origin_,
+                                                      interpolation_));
     }
     return runner_.get();
   }
@@ -151,24 +152,25 @@ class WarpAffineRunnerHolder<mediapipe::Image> {
       AffineTransformation::BorderMode border_mode) override {
     if (input.UsesGpu()) {
 #if !MEDIAPIPE_DISABLE_GPU
-      ASSIGN_OR_RETURN(auto* runner, gpu_holder_.GetRunner());
-      ASSIGN_OR_RETURN(auto result, runner->Run(input.GetGpuBuffer(), matrix,
-                                                size, border_mode));
+      MP_ASSIGN_OR_RETURN(auto* runner, gpu_holder_.GetRunner());
+      MP_ASSIGN_OR_RETURN(
+          auto result,
+          runner->Run(input.GetGpuBuffer(), matrix, size, border_mode));
       return mediapipe::Image(*result);
 #else
       return absl::UnavailableError("GPU support is disabled");
 #endif  // !MEDIAPIPE_DISABLE_GPU
     }
 #if !MEDIAPIPE_DISABLE_OPENCV
-    ASSIGN_OR_RETURN(auto* runner, cpu_holder_.GetRunner());
+    MP_ASSIGN_OR_RETURN(auto* runner, cpu_holder_.GetRunner());
     const auto& frame_ptr = input.GetImageFrameSharedPtr();
     // Wrap image into image frame.
     const ImageFrame image_frame(frame_ptr->Format(), frame_ptr->Width(),
                                  frame_ptr->Height(), frame_ptr->WidthStep(),
                                  const_cast<uint8_t*>(frame_ptr->PixelData()),
                                  [](uint8_t* data){});
-    ASSIGN_OR_RETURN(auto result,
-                     runner->Run(image_frame, matrix, size, border_mode));
+    MP_ASSIGN_OR_RETURN(auto result,
+                        runner->Run(image_frame, matrix, size, border_mode));
     return mediapipe::Image(std::make_shared<ImageFrame>(std::move(result)));
 #else
     return absl::UnavailableError("OpenCV support is disabled");
@@ -213,8 +215,8 @@ class WarpAffineCalculatorImpl : public mediapipe::api2::NodeImpl<InterfaceT> {
     AffineTransformation::Size output_size;
     output_size.width = out_width;
     output_size.height = out_height;
-    ASSIGN_OR_RETURN(auto* runner, holder_.GetRunner());
-    ASSIGN_OR_RETURN(
+    MP_ASSIGN_OR_RETURN(auto* runner, holder_.GetRunner());
+    MP_ASSIGN_OR_RETURN(
         auto result,
         runner->Run(
             *InterfaceT::kInImage(cc), transform, output_size,
@@ -517,8 +517,8 @@ absl::Status AudioToTensorCalculator::OutputTensor(const Matrix& block,
       // The last two elements are Nyquist component.
       fft_output_matrix(fft_size_ - 2) = fft_output_[1];  // Nyquist real part
       fft_output_matrix(fft_size_ - 1) = 0.0f;  // Nyquist imagery part
-      ASSIGN_OR_RETURN(output_tensor, ConvertToTensor(fft_output_matrix,
-                                                      {2, fft_size_ / 2}));
+      MP_ASSIGN_OR_RETURN(output_tensor, ConvertToTensor(fft_output_matrix,
+                                                         {2, fft_size_ / 2}));
       break;
     }
     case Options::WITH_DC_AND_NYQUIST: {
@@ -529,7 +529,7 @@ absl::Status AudioToTensorCalculator::OutputTensor(const Matrix& block,
       // The last two elements are Nyquist component.
       fft_output_matrix(fft_size_) = fft_output_[1];  // Nyquist real part
       fft_output_matrix(fft_size_ + 1) = 0.0f;  // Nyquist imagery part
-      ASSIGN_OR_RETURN(
+      MP_ASSIGN_OR_RETURN(
           output_tensor,
           ConvertToTensor(fft_output_matrix, {2, (fft_size_ + 2) / 2}));
       break;
@@ -537,7 +537,7 @@ absl::Status AudioToTensorCalculator::OutputTensor(const Matrix& block,
     case Options::WITHOUT_DC_AND_NYQUIST: {
       Matrix fft_output_matrix =
           Eigen::Map<const Matrix>(fft_output_.data() + 2, 1, fft_size_ - 2);
-      ASSIGN_OR_RETURN(
+      MP_ASSIGN_OR_RETURN(
           output_tensor,
           ConvertToTensor(fft_output_matrix, {2, (fft_size_ - 2) / 2}));
       break;
@@ -547,8 +547,8 @@ absl::Status AudioToTensorCalculator::OutputTensor(const Matrix& block,
     }
 
   } else {
-    ASSIGN_OR_RETURN(output_tensor,
-                     ConvertToTensor(block, {num_channels_, num_samples_}));
+    MP_ASSIGN_OR_RETURN(output_tensor,
+                        ConvertToTensor(block, {num_channels_, num_samples_}));
   }
   kTensorsOut(cc).Send(std::move(output_tensor), timestamp);
   return absl::OkStatus();
@@ -161,9 +161,9 @@ absl::Status BertPreprocessorCalculator::Open(CalculatorContext* cc) {
       &kMetadataExtractorSideIn(cc).Get();
   const tflite::ProcessUnit* tokenizer_metadata =
       metadata_extractor->GetInputProcessUnit(kTokenizerProcessUnitIndex);
-  ASSIGN_OR_RETURN(tokenizer_,
-                   tasks::text::tokenizers::CreateTokenizerFromProcessUnit(
-                       tokenizer_metadata, metadata_extractor));
+  MP_ASSIGN_OR_RETURN(tokenizer_,
+                      tasks::text::tokenizers::CreateTokenizerFromProcessUnit(
+                          tokenizer_metadata, metadata_extractor));
 
   auto* input_tensors_metadata = metadata_extractor->GetInputTensorMetadata();
   input_ids_tensor_index_ = FindTensorIndexByMetadataName(
@@ -67,9 +67,10 @@ absl::StatusOr<std::vector<std::vector<int>>> RunBertPreprocessorCalculator(
   tool::AddVectorSink("tensors", &graph_config, &output_packets);
 
   std::string model_buffer = tasks::core::LoadBinaryContent(model_path.data());
-  ASSIGN_OR_RETURN(std::unique_ptr<ModelMetadataExtractor> metadata_extractor,
-                   ModelMetadataExtractor::CreateFromModelBuffer(
-                       model_buffer.data(), model_buffer.size()));
+  MP_ASSIGN_OR_RETURN(
+      std::unique_ptr<ModelMetadataExtractor> metadata_extractor,
+      ModelMetadataExtractor::CreateFromModelBuffer(model_buffer.data(),
+                                                    model_buffer.size()));
   // Run the graph.
   CalculatorGraph graph;
   MP_RETURN_IF_ERROR(graph.Initialize(
@@ -192,18 +192,19 @@ class ImageToTensorCalculator : public Node {
     }
 
 #if MEDIAPIPE_DISABLE_GPU
-    ASSIGN_OR_RETURN(auto image, GetInputImage(kIn(cc)));
+    MP_ASSIGN_OR_RETURN(auto image, GetInputImage(kIn(cc)));
 #else
     const bool is_input_gpu = kInGpu(cc).IsConnected();
-    ASSIGN_OR_RETURN(auto image, is_input_gpu ? GetInputImage(kInGpu(cc))
-                                              : GetInputImage(kIn(cc)));
+    MP_ASSIGN_OR_RETURN(auto image, is_input_gpu ? GetInputImage(kInGpu(cc))
+                                                 : GetInputImage(kIn(cc)));
 #endif  // MEDIAPIPE_DISABLE_GPU
 
     RotatedRect roi = GetRoi(image->width(), image->height(), norm_rect);
     const int tensor_width = params_.output_width.value_or(image->width());
     const int tensor_height = params_.output_height.value_or(image->height());
-    ASSIGN_OR_RETURN(auto padding, PadRoi(tensor_width, tensor_height,
-                                          options_.keep_aspect_ratio(), &roi));
+    MP_ASSIGN_OR_RETURN(auto padding,
+                        PadRoi(tensor_width, tensor_height,
+                               options_.keep_aspect_ratio(), &roi));
     if (kOutLetterboxPadding(cc).IsConnected()) {
       kOutLetterboxPadding(cc).Send(padding);
     }
@@ -247,20 +248,20 @@ class ImageToTensorCalculator : public Node {
       if (!gpu_converter_) {
 #if !MEDIAPIPE_DISABLE_GPU
 #if MEDIAPIPE_METAL_ENABLED
-        ASSIGN_OR_RETURN(
+        MP_ASSIGN_OR_RETURN(
             gpu_converter_,
             CreateMetalConverter(cc, GetBorderMode(options_.border_mode())));
 #elif MEDIAPIPE_OPENGL_ES_VERSION >= MEDIAPIPE_OPENGL_ES_31
-        ASSIGN_OR_RETURN(gpu_converter_,
-                         CreateImageToGlBufferTensorConverter(
-                             cc, DoesGpuInputStartAtBottom(options_),
-                             GetBorderMode(options_.border_mode())));
+        MP_ASSIGN_OR_RETURN(gpu_converter_,
+                            CreateImageToGlBufferTensorConverter(
+                                cc, DoesGpuInputStartAtBottom(options_),
+                                GetBorderMode(options_.border_mode())));
 #else
         if (!gpu_converter_) {
-          ASSIGN_OR_RETURN(gpu_converter_,
-                           CreateImageToGlTextureTensorConverter(
-                               cc, DoesGpuInputStartAtBottom(options_),
-                               GetBorderMode(options_.border_mode())));
+          MP_ASSIGN_OR_RETURN(gpu_converter_,
+                              CreateImageToGlTextureTensorConverter(
+                                  cc, DoesGpuInputStartAtBottom(options_),
+                                  GetBorderMode(options_.border_mode())));
         }
         if (!gpu_converter_) {
           return absl::UnimplementedError(
@@ -272,18 +273,20 @@ class ImageToTensorCalculator : public Node {
     } else {
       if (!cpu_converter_) {
 #if !MEDIAPIPE_DISABLE_OPENCV
-        ASSIGN_OR_RETURN(cpu_converter_,
-                         CreateOpenCvConverter(
-                             cc, GetBorderMode(options_.border_mode()),
-                             GetOutputTensorType(/*uses_gpu=*/false, params_)));
+        MP_ASSIGN_OR_RETURN(
+            cpu_converter_,
+            CreateOpenCvConverter(
+                cc, GetBorderMode(options_.border_mode()),
+                GetOutputTensorType(/*uses_gpu=*/false, params_)));
         // TODO: FrameBuffer-based converter needs to call GetGpuBuffer()
         // to get access to a FrameBuffer view. Investigate if GetGpuBuffer() can be
         // made available even with MEDIAPIPE_DISABLE_GPU set.
 #elif MEDIAPIPE_ENABLE_HALIDE
-        ASSIGN_OR_RETURN(cpu_converter_,
-                         CreateFrameBufferConverter(
-                             cc, GetBorderMode(options_.border_mode()),
-                             GetOutputTensorType(/*uses_gpu=*/false, params_)));
+        MP_ASSIGN_OR_RETURN(
+            cpu_converter_,
+            CreateFrameBufferConverter(
+                cc, GetBorderMode(options_.border_mode()),
+                GetOutputTensorType(/*uses_gpu=*/false, params_)));
 #else
         ABSL_LOG(FATAL) << "Cannot create image to tensor CPU converter since "
                            "MEDIAPIPE_DISABLE_OPENCV is defined and "
@@ -175,9 +175,9 @@ absl::Status FrameBufferProcessor::CropRotateResize90Degrees(
       cropped_buffer_ = std::make_unique<uint8_t[]>(cropped_buffer_size);
       cropped_buffer_size_ = cropped_buffer_size;
     }
-    ASSIGN_OR_RETURN(cropped,
-                     frame_buffer::CreateFromRawBuffer(
-                         cropped_buffer_.get(), cropped_dims, input->format()));
+    MP_ASSIGN_OR_RETURN(
+        cropped, frame_buffer::CreateFromRawBuffer(
+                     cropped_buffer_.get(), cropped_dims, input->format()));
   }
   MP_RETURN_IF_ERROR(
       frame_buffer::Crop(*input, left, top, right, bottom, cropped.get()));
@@ -194,9 +194,9 @@ absl::Status FrameBufferProcessor::CropRotateResize90Degrees(
       rotated_buffer_ = std::make_unique<uint8_t[]>(rotated_buffer_size);
       rotated_buffer_size_ = rotated_buffer_size;
     }
-    ASSIGN_OR_RETURN(auto rotated, frame_buffer::CreateFromRawBuffer(
-                                       rotated_buffer_.get(), rotated_dims,
-                                       cropped->format()));
+    MP_ASSIGN_OR_RETURN(auto rotated, frame_buffer::CreateFromRawBuffer(
+                                          rotated_buffer_.get(), rotated_dims,
+                                          cropped->format()));
   }
   MP_RETURN_IF_ERROR(
       frame_buffer::Rotate(*cropped, rotation_degrees, rotated.get()));
@@ -217,9 +217,10 @@ absl::Status FrameBufferProcessor::ConvertToFloatTensor(
   RET_CHECK(output_tensor.element_type() == Tensor::ElementType::kFloat32);
   constexpr float kInputImageRangeMin = 0.0f;
   constexpr float kInputImageRangeMax = 255.0f;
-  ASSIGN_OR_RETURN(auto transform, GetValueRangeTransformation(
-                                       kInputImageRangeMin, kInputImageRangeMax,
-                                       range_min, range_max));
+  MP_ASSIGN_OR_RETURN(
+      auto transform,
+      GetValueRangeTransformation(kInputImageRangeMin, kInputImageRangeMax,
+                                  range_min, range_max));
   return frame_buffer::ToFloatTensor(*input_frame, transform.scale,
                                      transform.offset, output_tensor);
 }
@@ -255,7 +255,7 @@ class GlProcessor : public ImageToTensorConverter {
           << "OpenGL ES 3.1 is required.";
       command_queue_ = tflite::gpu::gl::NewCommandQueue(gpu_info);
 
-      ASSIGN_OR_RETURN(
+      MP_ASSIGN_OR_RETURN(
           auto extractor,
           SubRectExtractorGl::Create(gl_helper_.GetGlContext(),
                                      input_starts_at_bottom, border_mode));
@@ -293,10 +293,10 @@ class GlProcessor : public ImageToTensorConverter {
 
       constexpr float kInputImageRangeMin = 0.0f;
       constexpr float kInputImageRangeMax = 1.0f;
-      ASSIGN_OR_RETURN(auto transform,
-                       GetValueRangeTransformation(kInputImageRangeMin,
-                                                   kInputImageRangeMax,
-                                                   range_min, range_max));
+      MP_ASSIGN_OR_RETURN(auto transform,
+                          GetValueRangeTransformation(
+                              kInputImageRangeMin, kInputImageRangeMax,
+                              range_min, range_max));
 
       const int output_size = output_tensor.bytes() / output_shape.dims[0];
       auto buffer_view = output_tensor.GetOpenGlBufferWriteView();
@@ -193,10 +193,10 @@ class GlProcessor : public ImageToTensorConverter {
 
       constexpr float kInputImageRangeMin = 0.0f;
       constexpr float kInputImageRangeMax = 1.0f;
-      ASSIGN_OR_RETURN(auto transform,
-                       GetValueRangeTransformation(kInputImageRangeMin,
-                                                   kInputImageRangeMax,
-                                                   range_min, range_max));
+      MP_ASSIGN_OR_RETURN(auto transform,
+                          GetValueRangeTransformation(
+                              kInputImageRangeMin, kInputImageRangeMax,
+                              range_min, range_max));
       auto tensor_view = output_tensor.GetOpenGlTexture2dWriteView();
       MP_RETURN_IF_ERROR(ExtractSubRect(input_texture, roi,
                                         /*flip_horizontaly=*/false,
@@ -345,9 +345,9 @@ class MetalProcessor : public ImageToTensorConverter {
   absl::Status Init(CalculatorContext* cc, BorderMode border_mode) {
     metal_helper_ = [[MPPMetalHelper alloc] initWithCalculatorContext:cc];
     RET_CHECK(metal_helper_);
-    ASSIGN_OR_RETURN(extractor_, SubRectExtractorMetal::Make(
-                                     metal_helper_.mtlDevice,
-                                     OutputFormat::kF32C4, border_mode));
+    MP_ASSIGN_OR_RETURN(extractor_, SubRectExtractorMetal::Make(
+                                        metal_helper_.mtlDevice,
+                                        OutputFormat::kF32C4, border_mode));
     return absl::OkStatus();
   }
 
@@ -373,7 +373,7 @@ class MetalProcessor : public ImageToTensorConverter {
 
       constexpr float kInputImageRangeMin = 0.0f;
      constexpr float kInputImageRangeMax = 1.0f;
-      ASSIGN_OR_RETURN(
+      MP_ASSIGN_OR_RETURN(
          auto transform,
          GetValueRangeTransformation(kInputImageRangeMin, kInputImageRangeMax,
                                      range_min, range_max));
@@ -159,7 +159,7 @@ class OpenCvProcessor : public ImageToTensorConverter {
 
     constexpr float kInputImageRangeMin = 0.0f;
     constexpr float kInputImageRangeMax = 255.0f;
-    ASSIGN_OR_RETURN(
+    MP_ASSIGN_OR_RETURN(
        auto transform,
        GetValueRangeTransformation(kInputImageRangeMin, kInputImageRangeMax,
                                    range_min, range_max));
@@ -60,7 +60,7 @@ absl::Status InferenceCalculatorCpuImpl::UpdateContract(
 }
 
 absl::Status InferenceCalculatorCpuImpl::Open(CalculatorContext* cc) {
-  ASSIGN_OR_RETURN(inference_runner_, CreateInferenceRunner(cc));
+  MP_ASSIGN_OR_RETURN(inference_runner_, CreateInferenceRunner(cc));
   return absl::OkStatus();
 }
 
@@ -71,8 +71,8 @@ absl::Status InferenceCalculatorCpuImpl::Process(CalculatorContext* cc) {
   const auto& input_tensors = *kInTensors(cc);
   RET_CHECK(!input_tensors.empty());
 
-  ASSIGN_OR_RETURN(std::vector<Tensor> output_tensors,
-                   inference_runner_->Run(cc, input_tensors));
+  MP_ASSIGN_OR_RETURN(std::vector<Tensor> output_tensors,
+                      inference_runner_->Run(cc, input_tensors));
   kOutTensors(cc).Send(std::move(output_tensors));
   return absl::OkStatus();
 }
@@ -84,11 +84,11 @@ absl::Status InferenceCalculatorCpuImpl::Close(CalculatorContext* cc) {
 
 absl::StatusOr<std::unique_ptr<InferenceRunner>>
 InferenceCalculatorCpuImpl::CreateInferenceRunner(CalculatorContext* cc) {
-  ASSIGN_OR_RETURN(auto model_packet, GetModelAsPacket(cc));
-  ASSIGN_OR_RETURN(auto op_resolver_packet, GetOpResolverAsPacket(cc));
+  MP_ASSIGN_OR_RETURN(auto model_packet, GetModelAsPacket(cc));
+  MP_ASSIGN_OR_RETURN(auto op_resolver_packet, GetOpResolverAsPacket(cc));
   const int interpreter_num_threads =
       cc->Options<mediapipe::InferenceCalculatorOptions>().cpu_num_thread();
-  ASSIGN_OR_RETURN(TfLiteDelegatePtr delegate, MaybeCreateDelegate(cc));
+  MP_ASSIGN_OR_RETURN(TfLiteDelegatePtr delegate, MaybeCreateDelegate(cc));
   return CreateInferenceInterpreterDelegateRunner(
       std::move(model_packet), std::move(op_resolver_packet),
       std::move(delegate), interpreter_num_threads);
@@ -100,7 +100,7 @@ absl::Status InferenceCalculatorGlImpl::GpuInferenceRunner::Init(
 
 absl::Status InferenceCalculatorGlImpl::GpuInferenceRunner::LoadModel(
     CalculatorContext* cc) {
-  ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(cc));
+  MP_ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(cc));
   const auto& model = *model_packet_.Get();
   if (kSideInOpResolver(cc).IsConnected()) {
     const tflite::OpResolver& op_resolver = kSideInOpResolver(cc).Get();
@@ -170,7 +170,7 @@ absl::Status
 InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::InitTFLiteGPURunner(
     CalculatorContext* cc,
     const mediapipe::InferenceCalculatorOptions::Delegate& delegate) {
-  ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(cc));
+  MP_ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(cc));
   const auto& model = *model_packet_.Get();
 
   bool allow_precision_loss = delegate.gpu().allow_precision_loss();
@@ -306,16 +306,16 @@ InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::SaveGpuCaches(
     tflite::gpu::TFLiteGPURunner* gpu_runner) const {
   if (use_kernel_caching_) {
     // Save kernel file.
-    ASSIGN_OR_RETURN(std::vector<uint8_t> kernel_cache,
-                     gpu_runner->GetSerializedBinaryCache());
+    MP_ASSIGN_OR_RETURN(std::vector<uint8_t> kernel_cache,
+                        gpu_runner->GetSerializedBinaryCache());
     std::string cache_str(kernel_cache.begin(), kernel_cache.end());
     MP_RETURN_IF_ERROR(
         mediapipe::file::SetContents(cached_kernel_filename_, cache_str));
   }
   if (use_serialized_model_) {
     // Save serialized model file.
-    ASSIGN_OR_RETURN(std::vector<uint8_t> serialized_model_vec,
-                     gpu_runner->GetSerializedModel());
+    MP_ASSIGN_OR_RETURN(std::vector<uint8_t> serialized_model_vec,
+                        gpu_runner->GetSerializedModel());
     absl::string_view serialized_model(
         reinterpret_cast<char*>(serialized_model_vec.data()),
         serialized_model_vec.size());
@@ -412,8 +412,8 @@ absl::Status InferenceCalculatorGlAdvancedImpl::Process(CalculatorContext* cc) {
   RET_CHECK(!input_tensors.empty());
   auto output_tensors = absl::make_unique<std::vector<Tensor>>();
 
-  ASSIGN_OR_RETURN(*output_tensors,
-                   gpu_inference_runner_->Process(cc, input_tensors));
+  MP_ASSIGN_OR_RETURN(*output_tensors,
+                      gpu_inference_runner_->Process(cc, input_tensors));
 
   kOutTensors(cc).Send(std::move(output_tensors));
   return absl::OkStatus();
@@ -208,9 +208,9 @@ absl::Status InferenceCalculatorMetalImpl::Close(CalculatorContext* cc) {
 
 absl::Status InferenceCalculatorMetalImpl::InitInterpreter(
     CalculatorContext* cc) {
-  ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(cc));
+  MP_ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(cc));
   const auto& model = *model_packet_.Get();
-  ASSIGN_OR_RETURN(auto op_resolver_packet, GetOpResolverAsPacket(cc));
+  MP_ASSIGN_OR_RETURN(auto op_resolver_packet, GetOpResolverAsPacket(cc));
   const auto& op_resolver = op_resolver_packet.Get();
   tflite::InterpreterBuilder interpreter_builder(model, op_resolver);
   AddDelegate(cc, &interpreter_builder);
@@ -58,7 +58,7 @@ absl::Status InferenceCalculatorXnnpackImpl::UpdateContract(
 }
 
 absl::Status InferenceCalculatorXnnpackImpl::Open(CalculatorContext* cc) {
-  ASSIGN_OR_RETURN(inference_runner_, CreateInferenceRunner(cc));
+  MP_ASSIGN_OR_RETURN(inference_runner_, CreateInferenceRunner(cc));
   return absl::OkStatus();
 }
 
@@ -69,8 +69,8 @@ absl::Status InferenceCalculatorXnnpackImpl::Process(CalculatorContext* cc) {
   const auto& input_tensors = *kInTensors(cc);
   RET_CHECK(!input_tensors.empty());
 
-  ASSIGN_OR_RETURN(std::vector<Tensor> output_tensors,
-                   inference_runner_->Run(cc, input_tensors));
+  MP_ASSIGN_OR_RETURN(std::vector<Tensor> output_tensors,
+                      inference_runner_->Run(cc, input_tensors));
   kOutTensors(cc).Send(std::move(output_tensors));
   return absl::OkStatus();
 }
@@ -82,11 +82,11 @@ absl::Status InferenceCalculatorXnnpackImpl::Close(CalculatorContext* cc) {
 
 absl::StatusOr<std::unique_ptr<InferenceRunner>>
 InferenceCalculatorXnnpackImpl::CreateInferenceRunner(CalculatorContext* cc) {
-  ASSIGN_OR_RETURN(auto model_packet, GetModelAsPacket(cc));
-  ASSIGN_OR_RETURN(auto op_resolver_packet, GetOpResolverAsPacket(cc));
+  MP_ASSIGN_OR_RETURN(auto model_packet, GetModelAsPacket(cc));
+  MP_ASSIGN_OR_RETURN(auto op_resolver_packet, GetOpResolverAsPacket(cc));
   const int interpreter_num_threads =
       cc->Options<mediapipe::InferenceCalculatorOptions>().cpu_num_thread();
-  ASSIGN_OR_RETURN(TfLiteDelegatePtr delegate, CreateDelegate(cc));
+  MP_ASSIGN_OR_RETURN(TfLiteDelegatePtr delegate, CreateDelegate(cc));
   return CreateInferenceInterpreterDelegateRunner(
       std::move(model_packet), std::move(op_resolver_packet),
       std::move(delegate), interpreter_num_threads);
@@ -106,7 +106,7 @@ absl::Status RegexPreprocessorCalculator::Open(CalculatorContext* cc) {
     return absl::InvalidArgumentError("No tensor metadata found");
   }
 
-  ASSIGN_OR_RETURN(
+  MP_ASSIGN_OR_RETURN(
      const auto* tokenizer_metadata,
      metadata_extractor->FindFirstProcessUnit(
          *tensor_metadata, tflite::ProcessUnitOptions_RegexTokenizerOptions));
@@ -115,9 +115,9 @@ absl::Status RegexPreprocessorCalculator::Open(CalculatorContext* cc) {
   }
   const tflite::RegexTokenizerOptions* regex_tokenizer_options =
       tokenizer_metadata->options_as<tflite::RegexTokenizerOptions>();
-  ASSIGN_OR_RETURN(tokenizer_,
-                   tasks::text::tokenizers::CreateRegexTokenizerFromOptions(
-                       regex_tokenizer_options, metadata_extractor));
+  MP_ASSIGN_OR_RETURN(tokenizer_,
+                      tasks::text::tokenizers::CreateRegexTokenizerFromOptions(
+                          regex_tokenizer_options, metadata_extractor));
 
   const auto& options =
       cc->Options<mediapipe::RegexPreprocessorCalculatorOptions>();
@@ -67,9 +67,10 @@ absl::StatusOr<std::vector<int>> RunRegexPreprocessorCalculator(
   tool::AddVectorSink("tensors", &graph_config, &output_packets);
 
   std::string model_buffer = tasks::core::LoadBinaryContent(kTestModelPath);
-  ASSIGN_OR_RETURN(std::unique_ptr<ModelMetadataExtractor> metadata_extractor,
-                   ModelMetadataExtractor::CreateFromModelBuffer(
-                       model_buffer.data(), model_buffer.size()));
+  MP_ASSIGN_OR_RETURN(
+      std::unique_ptr<ModelMetadataExtractor> metadata_extractor,
+      ModelMetadataExtractor::CreateFromModelBuffer(model_buffer.data(),
+                                                    model_buffer.size()));
   // Run the graph.
   CalculatorGraph graph;
   MP_RETURN_IF_ERROR(graph.Initialize(
@@ -656,7 +656,7 @@ absl::Status TensorConverterCalculator::LoadOptions(CalculatorContext* cc,
   }
 
   // Get y-flip mode.
-  ASSIGN_OR_RETURN(flip_vertically_, ShouldFlipVertically(options, use_gpu));
+  MP_ASSIGN_OR_RETURN(flip_vertically_, ShouldFlipVertically(options, use_gpu));
 
   // Get row_major_matrix mode.
   row_major_matrix_ = options.row_major_matrix();
@@ -110,8 +110,8 @@ absl::Status TensorsToClassificationCalculator::Open(CalculatorContext* cc) {
   sort_by_descending_score_ = options.sort_by_descending_score();
   if (options.has_label_map_path()) {
     std::string string_path;
-    ASSIGN_OR_RETURN(string_path,
-                     PathToResourceAsFile(options.label_map_path()));
+    MP_ASSIGN_OR_RETURN(string_path,
+                        PathToResourceAsFile(options.label_map_path()));
     std::string label_map_string;
     MP_RETURN_IF_ERROR(
         mediapipe::GetResourceContents(string_path, &label_map_string));
@@ -267,7 +267,8 @@ absl::Status TensorsToSegmentationCalculator::Process(CalculatorContext* cc) {
   {
     RET_CHECK(!input_tensors.empty());
     RET_CHECK(input_tensors[0].element_type() == Tensor::ElementType::kFloat32);
-    ASSIGN_OR_RETURN(auto hwc, GetHwcFromDims(input_tensors[0].shape().dims));
+    MP_ASSIGN_OR_RETURN(auto hwc,
+                        GetHwcFromDims(input_tensors[0].shape().dims));
     int tensor_channels = std::get<2>(hwc);
     typedef mediapipe::TensorsToSegmentationCalculatorOptions Options;
     switch (options_.activation()) {
@@ -330,7 +331,7 @@ absl::Status TensorsToSegmentationCalculator::ProcessCpu(
   // Get input streams, and dimensions.
   const auto& input_tensors =
       cc->Inputs().Tag(kTensorsTag).Get<std::vector<Tensor>>();
-  ASSIGN_OR_RETURN(auto hwc, GetHwcFromDims(input_tensors[0].shape().dims));
+  MP_ASSIGN_OR_RETURN(auto hwc, GetHwcFromDims(input_tensors[0].shape().dims));
   auto [tensor_height, tensor_width, tensor_channels] = hwc;
   int output_width = tensor_width, output_height = tensor_height;
   if (cc->Inputs().HasTag(kOutputSizeTag)) {
@@ -441,7 +442,7 @@ absl::Status TensorsToSegmentationCalculator::ProcessGpu(
   // Get input streams, and dimensions.
   const auto& input_tensors =
       cc->Inputs().Tag(kTensorsTag).Get<std::vector<Tensor>>();
-  ASSIGN_OR_RETURN(auto hwc, GetHwcFromDims(input_tensors[0].shape().dims));
+  MP_ASSIGN_OR_RETURN(auto hwc, GetHwcFromDims(input_tensors[0].shape().dims));
   auto [tensor_height, tensor_width, tensor_channels] = hwc;
   int output_width = tensor_width, output_height = tensor_height;
   if (cc->Inputs().HasTag(kOutputSizeTag)) {
@@ -61,9 +61,10 @@ RunUniversalSentenceEncoderPreprocessorCalculator(absl::string_view text) {
 
   std::string model_buffer =
       tasks::core::LoadBinaryContent(kTestModelPath.data());
-  ASSIGN_OR_RETURN(std::unique_ptr<ModelMetadataExtractor> metadata_extractor,
-                   ModelMetadataExtractor::CreateFromModelBuffer(
-                       model_buffer.data(), model_buffer.size()));
+  MP_ASSIGN_OR_RETURN(
+      std::unique_ptr<ModelMetadataExtractor> metadata_extractor,
+      ModelMetadataExtractor::CreateFromModelBuffer(model_buffer.data(),
+                                                    model_buffer.size()));
   // Run the graph.
   CalculatorGraph graph;
   MP_RETURN_IF_ERROR(graph.Initialize(
@@ -151,7 +151,7 @@ class ObjectDetectionTensorsToDetectionsCalculator : public CalculatorBase {
     tf::Tensor input_num_detections_tensor =
         tf::Tensor(tf::DT_FLOAT, tf::TensorShape({0}));
     if (cc->Inputs().HasTag(kClasses)) {
-      ASSIGN_OR_RETURN(
+      MP_ASSIGN_OR_RETURN(
          input_num_detections_tensor,
          MaybeSqueezeDims(kNumDetections,
                           cc->Inputs().Tag(kNumDetections).Get<tf::Tensor>()));
@@ -160,12 +160,12 @@ class ObjectDetectionTensorsToDetectionsCalculator : public CalculatorBase {
       RET_CHECK_EQ(input_num_detections_tensor.dtype(), tf::DT_FLOAT);
     }
 
-    ASSIGN_OR_RETURN(
+    MP_ASSIGN_OR_RETURN(
        auto input_boxes_tensor,
        MaybeSqueezeDims(kBoxes, cc->Inputs().Tag(kBoxes).Get<tf::Tensor>()));
     RET_CHECK_EQ(input_boxes_tensor.dtype(), tf::DT_FLOAT);
 
-    ASSIGN_OR_RETURN(
+    MP_ASSIGN_OR_RETURN(
        auto input_scores_tensor,
        MaybeSqueezeDims(kScores, cc->Inputs().Tag(kScores).Get<tf::Tensor>()));
     RET_CHECK_EQ(input_scores_tensor.dtype(), tf::DT_FLOAT);
@@ -173,7 +173,7 @@ class ObjectDetectionTensorsToDetectionsCalculator : public CalculatorBase {
     tf::Tensor input_classes_tensor =
         tf::Tensor(tf::DT_FLOAT, tf::TensorShape({0}));
     if (cc->Inputs().HasTag(kClasses)) {
-      ASSIGN_OR_RETURN(
+      MP_ASSIGN_OR_RETURN(
          input_classes_tensor,
          MaybeSqueezeDims(kClasses,
                           cc->Inputs().Tag(kClasses).Get<tf::Tensor>()));
@@ -489,8 +489,8 @@ absl::Status TfLiteInferenceCalculator::WriteKernelsToFile() {
 #if MEDIAPIPE_TFLITE_GL_INFERENCE && defined(MEDIAPIPE_ANDROID)
   if (use_kernel_caching_) {
     // Save kernel file.
-    ASSIGN_OR_RETURN(std::vector<uint8_t> kernel_cache,
-                     tflite_gpu_runner_->GetSerializedBinaryCache());
+    MP_ASSIGN_OR_RETURN(std::vector<uint8_t> kernel_cache,
+                        tflite_gpu_runner_->GetSerializedBinaryCache());
     std::string cache_str(kernel_cache.begin(), kernel_cache.end());
     MP_RETURN_IF_ERROR(
         mediapipe::file::SetContents(cached_kernel_filename_, cache_str));
@@ -733,7 +733,7 @@ absl::Status TfLiteInferenceCalculator::ReadKernelsFromFile() {
 absl::Status TfLiteInferenceCalculator::InitTFLiteGPURunner(
     CalculatorContext* cc) {
 #if MEDIAPIPE_TFLITE_GL_INFERENCE
-  ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(*cc));
+  MP_ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(*cc));
   const auto& model = *model_packet_.Get<TfLiteModelPtr>();
 
   tflite::ops::builtin::BuiltinOpResolverWithoutDefaultDelegates
@@ -817,8 +817,8 @@ absl::Status TfLiteInferenceCalculator::InitTFLiteGPURunner(
   gpu_data_out_.resize(tflite_gpu_runner_->outputs_size());
   for (int i = 0; i < tflite_gpu_runner_->outputs_size(); ++i) {
     gpu_data_out_[i] = absl::make_unique<GPUData>();
-    ASSIGN_OR_RETURN(gpu_data_out_[i]->elements,
-                     tflite_gpu_runner_->GetOutputElements(i));
+    MP_ASSIGN_OR_RETURN(gpu_data_out_[i]->elements,
+                        tflite_gpu_runner_->GetOutputElements(i));
     // Create and bind input buffer.
     MP_RETURN_IF_ERROR(
         ::tflite::gpu::gl::CreateReadWriteShaderStorageBuffer<float>(
@@ -839,7 +839,7 @@ absl::Status TfLiteInferenceCalculator::LoadModel(CalculatorContext* cc) {
     return absl::OkStatus();
   }
 
-  ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(*cc));
+  MP_ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(*cc));
   const auto& model = *model_packet_.Get<TfLiteModelPtr>();
 
   tflite::ops::builtin::BuiltinOpResolverWithoutDefaultDelegates
@@ -101,8 +101,8 @@ absl::Status TfLiteTensorsToClassificationCalculator::Open(
   top_k_ = options_.top_k();
   if (options_.has_label_map_path()) {
     std::string string_path;
-    ASSIGN_OR_RETURN(string_path,
-                     PathToResourceAsFile(options_.label_map_path()));
+    MP_ASSIGN_OR_RETURN(string_path,
+                        PathToResourceAsFile(options_.label_map_path()));
     std::string label_map_string;
     MP_RETURN_IF_ERROR(file::GetContents(string_path, &label_map_string));
 
@@ -171,13 +171,13 @@ class AssociationCalculator : public CalculatorBase {
     // Compare this element with elements of the input collection. If this
     // element has high overlap with elements of the collection, remove
     // those elements from the collection and add this element.
-    ASSIGN_OR_RETURN(auto cur_rect, GetRectangle(element));
+    MP_ASSIGN_OR_RETURN(auto cur_rect, GetRectangle(element));
 
     bool change_id = false;
     int new_elem_id = -1;
 
     for (auto uit = current->begin(); uit != current->end();) {
-      ASSIGN_OR_RETURN(auto prev_rect, GetRectangle(*uit));
+      MP_ASSIGN_OR_RETURN(auto prev_rect, GetRectangle(*uit));
       if (CalculateIou(cur_rect, prev_rect) >
           options_.min_similarity_threshold()) {
         std::pair<bool, int> prev_id = GetId(*uit);
@@ -83,8 +83,8 @@ absl::Status DetectionLabelIdToTextCalculator::Open(CalculatorContext* cc) {
         << "Only can set one of the following fields in the CalculatorOptions: "
           "label_map_path, label, and label_items.";
     std::string string_path;
-    ASSIGN_OR_RETURN(string_path,
-                     PathToResourceAsFile(options.label_map_path()));
+    MP_ASSIGN_OR_RETURN(string_path,
+                        PathToResourceAsFile(options.label_map_path()));
     std::string label_map_string;
     MP_RETURN_IF_ERROR(
         mediapipe::GetResourceContents(string_path, &label_map_string));
@@ -96,10 +96,10 @@ absl::StatusOr<LocationData::Format> GetLocationDataFormat(
     std::vector<Detection>& detections) {
   RET_CHECK(!detections.empty());
   LocationData::Format output_format;
-  ASSIGN_OR_RETURN(output_format, GetLocationDataFormat(detections[0]));
+  MP_ASSIGN_OR_RETURN(output_format, GetLocationDataFormat(detections[0]));
   for (int i = 1; i < detections.size(); ++i) {
-    ASSIGN_OR_RETURN(LocationData::Format format,
-                     GetLocationDataFormat(detections[i]));
+    MP_ASSIGN_OR_RETURN(LocationData::Format format,
+                        GetLocationDataFormat(detections[i]));
     if (output_format != format) {
       return absl::InvalidArgumentError(
           "Input detections have different location data formats.");
@@ -243,8 +243,8 @@ class DetectionTransformationCalculator : public Node {
         OutputEmptyDetections(cc);
         return absl::OkStatus();
       }
-      ASSIGN_OR_RETURN(input_location_data_format,
-                       GetLocationDataFormat(transformed_detections));
+      MP_ASSIGN_OR_RETURN(input_location_data_format,
+                          GetLocationDataFormat(transformed_detections));
      for (Detection& detection : transformed_detections) {
        MP_RETURN_IF_ERROR(ConvertBoundingBox(image_size, &detection));
      }
@@ -254,8 +254,8 @@ class DetectionTransformationCalculator : public Node {
        OutputEmptyDetections(cc);
        return absl::OkStatus();
      }
-      ASSIGN_OR_RETURN(input_location_data_format,
-                       GetLocationDataFormat(kInDetection(cc).Get()));
+      MP_ASSIGN_OR_RETURN(input_location_data_format,
+                          GetLocationDataFormat(kInDetection(cc).Get()));
      MP_RETURN_IF_ERROR(
          ConvertBoundingBox(image_size, &transformed_detection));
      transformed_detections.push_back(transformed_detection);
@@ -137,8 +137,8 @@ class LandmarksRefinementCalculatorImpl
     }
 
     // Validate indexes mapping and get total number of refined landmarks.
-    ASSIGN_OR_RETURN(n_refined_landmarks_,
-                     GetNumberOfRefinedLandmarks(options_.refinement()));
+    MP_ASSIGN_OR_RETURN(n_refined_landmarks_,
+                        GetNumberOfRefinedLandmarks(options_.refinement()));
 
     // Validate that number of refinements and landmark streams is the same.
     RET_CHECK_EQ(kLandmarks(cc).Count(), options_.refinement_size())
@@ -43,9 +43,10 @@ class LandmarksSmoothingCalculatorImpl
     : public NodeImpl<LandmarksSmoothingCalculator> {
  public:
   absl::Status Open(CalculatorContext* cc) override {
-    ASSIGN_OR_RETURN(landmarks_filter_,
-                     InitializeLandmarksFilter(
-                         cc->Options<LandmarksSmoothingCalculatorOptions>()));
+    MP_ASSIGN_OR_RETURN(
+        landmarks_filter_,
+        InitializeLandmarksFilter(
+            cc->Options<LandmarksSmoothingCalculatorOptions>()));
     return absl::OkStatus();
   }
 
@@ -348,7 +348,8 @@ absl::StatusOr<LandmarksFilter*> MultiLandmarkFilters::GetOrCreate(
     return it->second.get();
   }
 
-  ASSIGN_OR_RETURN(auto landmarks_filter, InitializeLandmarksFilter(options));
+  MP_ASSIGN_OR_RETURN(auto landmarks_filter,
+                      InitializeLandmarksFilter(options));
   filters_[tracking_id] = std::move(landmarks_filter);
   return filters_[tracking_id].get();
 }
@@ -119,10 +119,10 @@ class LandmarksTransformationCalculatorImpl
 
     for (auto& transformation : options.transformation()) {
       if (transformation.has_normalize_translation()) {
-        ASSIGN_OR_RETURN(landmarks, NormalizeTranslation(landmarks));
+        MP_ASSIGN_OR_RETURN(landmarks, NormalizeTranslation(landmarks));
       } else if (transformation.has_flip_axis()) {
-        ASSIGN_OR_RETURN(landmarks,
-                         FlipAxis(landmarks, transformation.flip_axis()));
+        MP_ASSIGN_OR_RETURN(landmarks,
+                            FlipAxis(landmarks, transformation.flip_axis()));
       } else {
         RET_CHECK_FAIL() << "Unknown landmarks transformation";
       }
@@ -86,7 +86,7 @@ class LocalFileContentsCalculator : public CalculatorBase {
          ++input_id, ++output_id) {
       std::string file_path =
           cc->InputSidePackets().Get(input_id).Get<std::string>();
-      ASSIGN_OR_RETURN(file_path, PathToResourceAsFile(file_path));
+      MP_ASSIGN_OR_RETURN(file_path, PathToResourceAsFile(file_path));
 
       std::string contents;
       MP_RETURN_IF_ERROR(GetResourceContents(
@@ -83,10 +83,11 @@ class MultiLandmarksSmoothingCalculatorImpl
                                          image_width, image_height);
       }
 
-      ASSIGN_OR_RETURN(auto* landmarks_filter,
-                       multi_filters_.GetOrCreate(
-                           tracking_ids[i],
-                           cc->Options<LandmarksSmoothingCalculatorOptions>()));
+      MP_ASSIGN_OR_RETURN(
+          auto* landmarks_filter,
+          multi_filters_.GetOrCreate(
+              tracking_ids[i],
+              cc->Options<LandmarksSmoothingCalculatorOptions>()));
 
       LandmarkList out_landmarks;
       MP_RETURN_IF_ERROR(landmarks_filter->Apply(in_landmarks, timestamp,
@@ -74,10 +74,11 @@ class MultiWorldLandmarksSmoothingCalculatorImpl
         object_scale = GetObjectScale(object_scale_roi_vec.value()[i]);
       }
 
-      ASSIGN_OR_RETURN(auto* landmarks_filter,
-                       multi_filters_.GetOrCreate(
-                           tracking_ids[i],
-                           cc->Options<LandmarksSmoothingCalculatorOptions>()));
+      MP_ASSIGN_OR_RETURN(
+          auto* landmarks_filter,
+          multi_filters_.GetOrCreate(
+              tracking_ids[i],
+              cc->Options<LandmarksSmoothingCalculatorOptions>()));
 
       LandmarkList out_landmarks;
       MP_RETURN_IF_ERROR(landmarks_filter->Apply(in_landmarks, timestamp,
@@ -77,7 +77,7 @@ class RefineLandmarksFromHeatmapCalculatorImpl
     const auto& options =
         cc->Options<mediapipe::RefineLandmarksFromHeatmapCalculatorOptions>();
 
-    ASSIGN_OR_RETURN(
+    MP_ASSIGN_OR_RETURN(
        auto out_lms,
        RefineLandmarksFromHeatMap(
            in_lms, hm_raw, hm_tensor.shape().dims, options.kernel_size(),
@@ -108,7 +108,7 @@ absl::StatusOr<mediapipe::NormalizedLandmarkList> RefineLandmarksFromHeatMap(
     const float* heatmap_raw_data, const std::vector<int>& heatmap_dims,
     int kernel_size, float min_confidence_to_refine, bool refine_presence,
     bool refine_visibility) {
-  ASSIGN_OR_RETURN(auto hm_dims, GetHwcFromDims(heatmap_dims));
+  MP_ASSIGN_OR_RETURN(auto hm_dims, GetHwcFromDims(heatmap_dims));
   auto [hm_height, hm_width, hm_channels] = hm_dims;
 
   RET_CHECK_EQ(in_lms.landmark_size(), hm_channels)
@@ -73,7 +73,8 @@ absl::Status TimedBoxListIdToLabelCalculator::Open(CalculatorContext* cc) {
       cc->Options<::mediapipe::TimedBoxListIdToLabelCalculatorOptions>();
 
   std::string string_path;
-  ASSIGN_OR_RETURN(string_path, PathToResourceAsFile(options.label_map_path()));
+  MP_ASSIGN_OR_RETURN(string_path,
+                      PathToResourceAsFile(options.label_map_path()));
   std::string label_map_string;
   MP_RETURN_IF_ERROR(file::GetContents(string_path, &label_map_string));
 
@@ -86,7 +86,7 @@ absl::Status ToImageCalculator::UpdateContract(CalculatorContract* cc) {
 }
 
 absl::Status ToImageCalculator::Process(CalculatorContext* cc) {
-  ASSIGN_OR_RETURN(auto output, GetInputImage(cc));
+  MP_ASSIGN_OR_RETURN(auto output, GetInputImage(cc));
   kOut(cc).Send(output.At(cc->InputTimestamp()));
   return absl::OkStatus();
 }
@@ -227,7 +227,7 @@ absl::Status TopKScoresCalculator::Process(CalculatorContext* cc) {
 
 absl::Status TopKScoresCalculator::LoadLabelmap(std::string label_map_path) {
   std::string string_path;
-  ASSIGN_OR_RETURN(string_path, PathToResourceAsFile(label_map_path));
+  MP_ASSIGN_OR_RETURN(string_path, PathToResourceAsFile(label_map_path));
   std::string label_map_string;
   MP_RETURN_IF_ERROR(file::GetContents(string_path, &label_map_string));
 
@@ -207,7 +207,7 @@ absl::Status BoxDetectorCalculator::Open(CalculatorContext* cc) {
 
   for (const auto& filename : options_.index_proto_filename()) {
     std::string string_path;
-    ASSIGN_OR_RETURN(string_path, PathToResourceAsFile(filename));
+    MP_ASSIGN_OR_RETURN(string_path, PathToResourceAsFile(filename));
     std::string index_string;
     MP_RETURN_IF_ERROR(file::GetContents(string_path, &index_string));
     BoxDetectorIndex predefined_index;
@@ -478,7 +478,7 @@ absl::Status MotionAnalysisCalculator::Process(CalculatorContext* cc) {
 
     // Fill in timestamps we process.
     if (!selection_stream->Value().IsEmpty()) {
-      ASSIGN_OR_RETURN(
+      MP_ASSIGN_OR_RETURN(
          frame_selection_result,
          selection_stream->Value().ConsumeOrCopy<FrameSelectionResult>());
       use_frame = true;
@@ -87,8 +87,8 @@ absl::Status RunMPPGraph() {
   }
 
   ABSL_LOG(INFO) << "Start running the calculator graph.";
-  ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
-                   graph.AddOutputStreamPoller(kOutputStream));
+  MP_ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
+                      graph.AddOutputStreamPoller(kOutputStream));
   MP_RETURN_IF_ERROR(graph.StartRun({}));
 
   ABSL_LOG(INFO) << "Start grabbing and processing frames.";
@@ -79,8 +79,8 @@ absl::Status RunMPPGraph() {
   }
 
   ABSL_LOG(INFO) << "Start running the calculator graph.";
-  ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
-                   graph.AddOutputStreamPoller(kOutputStream));
+  MP_ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
+                      graph.AddOutputStreamPoller(kOutputStream));
   MP_RETURN_IF_ERROR(graph.StartRun({}));
 
   ABSL_LOG(INFO) << "Start grabbing and processing frames.";
@@ -62,7 +62,7 @@ absl::Status RunMPPGraph() {
   MP_RETURN_IF_ERROR(graph.Initialize(config));
 
   ABSL_LOG(INFO) << "Initialize the GPU.";
-  ASSIGN_OR_RETURN(auto gpu_resources, mediapipe::GpuResources::Create());
+  MP_ASSIGN_OR_RETURN(auto gpu_resources, mediapipe::GpuResources::Create());
   MP_RETURN_IF_ERROR(graph.SetGpuResources(std::move(gpu_resources)));
   mediapipe::GlCalculatorHelper gpu_helper;
   gpu_helper.InitializeForTest(graph.GetGpuResources().get());
@@ -89,8 +89,8 @@ absl::Status RunMPPGraph() {
   }
 
   ABSL_LOG(INFO) << "Start running the calculator graph.";
-  ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
-                   graph.AddOutputStreamPoller(kOutputStream));
+  MP_ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
+                      graph.AddOutputStreamPoller(kOutputStream));
   MP_RETURN_IF_ERROR(graph.StartRun({}));
 
   ABSL_LOG(INFO) << "Start grabbing and processing frames.";
@@ -42,8 +42,8 @@ absl::Status PrintHelloWorld() {
 
   CalculatorGraph graph;
   MP_RETURN_IF_ERROR(graph.Initialize(config));
-  ASSIGN_OR_RETURN(OutputStreamPoller poller,
-                   graph.AddOutputStreamPoller("out"));
+  MP_ASSIGN_OR_RETURN(OutputStreamPoller poller,
+                      graph.AddOutputStreamPoller("out"));
   MP_RETURN_IF_ERROR(graph.StartRun({}));
   // Give 10 input packets that contains the same string "Hello World!".
   for (int i = 0; i < 10; ++i) {
@@ -57,16 +57,16 @@ absl::StatusOr<std::string> ReadFileToString(const std::string& file_path) {
 
 absl::Status ProcessImage(std::unique_ptr<mediapipe::CalculatorGraph> graph) {
   ABSL_LOG(INFO) << "Load the image.";
-  ASSIGN_OR_RETURN(const std::string raw_image,
-                   ReadFileToString(absl::GetFlag(FLAGS_input_image_path)));
+  MP_ASSIGN_OR_RETURN(const std::string raw_image,
+                      ReadFileToString(absl::GetFlag(FLAGS_input_image_path)));
 
   ABSL_LOG(INFO) << "Start running the calculator graph.";
-  ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller output_image_poller,
-                   graph->AddOutputStreamPoller(kOutputImageStream));
-  ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller left_iris_depth_poller,
-                   graph->AddOutputStreamPoller(kLeftIrisDepthMmStream));
-  ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller right_iris_depth_poller,
-                   graph->AddOutputStreamPoller(kRightIrisDepthMmStream));
+  MP_ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller output_image_poller,
+                      graph->AddOutputStreamPoller(kOutputImageStream));
+  MP_ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller left_iris_depth_poller,
+                      graph->AddOutputStreamPoller(kLeftIrisDepthMmStream));
+  MP_ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller right_iris_depth_poller,
+                      graph->AddOutputStreamPoller(kRightIrisDepthMmStream));
   MP_RETURN_IF_ERROR(graph->StartRun({}));
 
   // Send image packet into the graph.
@@ -83,8 +83,8 @@ absl::Status OutputSidePacketsToLocalFile(mediapipe::CalculatorGraph& graph) {
   std::vector<std::string> side_packet_names =
       absl::StrSplit(absl::GetFlag(FLAGS_output_side_packets), ',');
   for (const std::string& side_packet_name : side_packet_names) {
-    ASSIGN_OR_RETURN(auto status_or_packet,
-                     graph.GetOutputSidePacket(side_packet_name));
+    MP_ASSIGN_OR_RETURN(auto status_or_packet,
+                        graph.GetOutputSidePacket(side_packet_name));
     file << absl::StrCat(side_packet_name, ":",
                          status_or_packet.Get<std::string>(), "\n");
   }
@@ -125,8 +125,8 @@ absl::Status RunMPPGraph() {
   MP_RETURN_IF_ERROR(graph.Initialize(config, input_side_packets));
   if (!absl::GetFlag(FLAGS_output_stream).empty() &&
       !absl::GetFlag(FLAGS_output_stream_file).empty()) {
-    ASSIGN_OR_RETURN(auto poller, graph.AddOutputStreamPoller(
-                                      absl::GetFlag(FLAGS_output_stream)));
+    MP_ASSIGN_OR_RETURN(auto poller, graph.AddOutputStreamPoller(
+                                         absl::GetFlag(FLAGS_output_stream)));
     ABSL_LOG(INFO) << "Start running the calculator graph.";
     MP_RETURN_IF_ERROR(graph.StartRun({}));
     MP_RETURN_IF_ERROR(OutputStreamToLocalFile(poller));
@@ -530,9 +530,9 @@ struct ConsumerNode : public Node {
   MEDIAPIPE_NODE_CONTRACT(kInt, kGeneric, kOneOf);
 
   absl::Status Process(CalculatorContext* cc) override {
-    ASSIGN_OR_RETURN(auto maybe_int, kInt(cc).Consume());
-    ASSIGN_OR_RETURN(auto maybe_float, kGeneric(cc).Consume<float>());
-    ASSIGN_OR_RETURN(auto maybe_int2, kOneOf(cc).Consume<int>());
+    MP_ASSIGN_OR_RETURN(auto maybe_int, kInt(cc).Consume());
+    MP_ASSIGN_OR_RETURN(auto maybe_float, kGeneric(cc).Consume<float>());
+    MP_ASSIGN_OR_RETURN(auto maybe_int2, kOneOf(cc).Consume<int>());
     return {};
   }
 };
@@ -207,7 +207,7 @@ absl::Status CalculatorGraph::InitializeStreams() {
 
   // Initialize GraphInputStreams.
   int graph_input_stream_count = 0;
-  ASSIGN_OR_RETURN(
+  MP_ASSIGN_OR_RETURN(
      auto input_tag_map,
      tool::TagMap::Create(validated_graph_->Config().input_stream()));
   for (const auto& stream_name : input_tag_map->Names()) {
@@ -371,7 +371,7 @@ absl::Status CalculatorGraph::InitializeExecutors() {
              "CalculatorGraph::SetExecutor() call.";
     }
     // clang-format off
-    ASSIGN_OR_RETURN(Executor* executor,
+    MP_ASSIGN_OR_RETURN(Executor* executor,
                      ExecutorRegistry::CreateByNameInNamespace(
                          validated_graph_->Package(),
                          executor_config.type(), executor_config.options()));
@@ -1335,7 +1335,7 @@ absl::Status CalculatorGraph::CreateDefaultThreadPool(
   }
   options->set_num_threads(num_threads);
   // clang-format off
-  ASSIGN_OR_RETURN(Executor* executor,
+  MP_ASSIGN_OR_RETURN(Executor* executor,
                    ThreadPoolExecutor::Create(extendable_options));
   // clang-format on
   return SetExecutorInternal("", std::shared_ptr<Executor>(executor));
@@ -303,14 +303,15 @@ absl::Status CalculatorNode::InitializeInputStreamHandler(
   const ProtoString& input_stream_handler_name =
       handler_config.input_stream_handler();
   RET_CHECK(!input_stream_handler_name.empty());
-  ASSIGN_OR_RETURN(input_stream_handler_,
-                   InputStreamHandlerRegistry::CreateByNameInNamespace(
-                       validated_graph_->Package(), input_stream_handler_name,
-                       input_stream_types.TagMap(),
-                       &calculator_context_manager_, handler_config.options(),
-                       /*calculator_run_in_parallel=*/max_in_flight_ > 1),
-                   _ << "\"" << input_stream_handler_name
-                     << "\" is not a registered input stream handler.");
+  MP_ASSIGN_OR_RETURN(
+      input_stream_handler_,
+      InputStreamHandlerRegistry::CreateByNameInNamespace(
+          validated_graph_->Package(), input_stream_handler_name,
+          input_stream_types.TagMap(), &calculator_context_manager_,
+          handler_config.options(),
+          /*calculator_run_in_parallel=*/max_in_flight_ > 1),
+      _ << "\"" << input_stream_handler_name
+        << "\" is not a registered input stream handler.");
 
   return absl::OkStatus();
 }
@@ -321,14 +322,15 @@ absl::Status CalculatorNode::InitializeOutputStreamHandler(
   const ProtoString& output_stream_handler_name =
       handler_config.output_stream_handler();
   RET_CHECK(!output_stream_handler_name.empty());
-  ASSIGN_OR_RETURN(output_stream_handler_,
-                   OutputStreamHandlerRegistry::CreateByNameInNamespace(
-                       validated_graph_->Package(), output_stream_handler_name,
-                       output_stream_types.TagMap(),
-                       &calculator_context_manager_, handler_config.options(),
-                       /*calculator_run_in_parallel=*/max_in_flight_ > 1),
-                   _ << "\"" << output_stream_handler_name
-                     << "\" is not a registered output stream handler.");
+  MP_ASSIGN_OR_RETURN(
+      output_stream_handler_,
+      OutputStreamHandlerRegistry::CreateByNameInNamespace(
+          validated_graph_->Package(), output_stream_handler_name,
+          output_stream_types.TagMap(), &calculator_context_manager_,
+          handler_config.options(),
+          /*calculator_run_in_parallel=*/max_in_flight_ > 1),
+      _ << "\"" << output_stream_handler_name
+        << "\" is not a registered output stream handler.");
   return absl::OkStatus();
 }
 
@@ -420,7 +422,7 @@ absl::Status CalculatorNode::PrepareForRun(
   MP_RETURN_IF_ERROR(calculator_context_manager_.PrepareForRun(std::bind(
       &CalculatorNode::ConnectShardsToStreams, this, std::placeholders::_1)));
 
-  ASSIGN_OR_RETURN(
+  MP_ASSIGN_OR_RETURN(
      auto calculator_factory,
      CalculatorBaseRegistry::CreateByNameInNamespace(
          validated_graph_->Package(), calculator_state_->CalculatorType()));
@@ -111,20 +111,20 @@ absl::Status CalculatorRunner::InitializeFromNodeConfig(
         node_config_.mutable_input_side_packet());
   }
 
-  ASSIGN_OR_RETURN(auto input_map,
-                   tool::TagMap::Create(node_config_.input_stream()));
+  MP_ASSIGN_OR_RETURN(auto input_map,
+                      tool::TagMap::Create(node_config_.input_stream()));
   inputs_ = absl::make_unique<StreamContentsSet>(input_map);
 
-  ASSIGN_OR_RETURN(auto output_map,
-                   tool::TagMap::Create(node_config_.output_stream()));
+  MP_ASSIGN_OR_RETURN(auto output_map,
+                      tool::TagMap::Create(node_config_.output_stream()));
   outputs_ = absl::make_unique<StreamContentsSet>(output_map);
 
-  ASSIGN_OR_RETURN(auto input_side_map,
-                   tool::TagMap::Create(node_config_.input_side_packet()));
+  MP_ASSIGN_OR_RETURN(auto input_side_map,
+                      tool::TagMap::Create(node_config_.input_side_packet()));
   input_side_packets_ = absl::make_unique<PacketSet>(input_side_map);
 
-  ASSIGN_OR_RETURN(auto output_side_map,
-                   tool::TagMap::Create(node_config_.output_side_packet()));
+  MP_ASSIGN_OR_RETURN(auto output_side_map,
+                      tool::TagMap::Create(node_config_.output_side_packet()));
   output_side_packets_ = absl::make_unique<PacketSet>(output_side_map);
 
   return absl::OkStatus();
@ -353,7 +353,7 @@ absl::Status CalculatorRunner::Run() {
|
|||
node_config_.output_side_packet(i), &tag, &index, &name));
|
||||
Packet& contents = output_side_packets_->Get(
|
||||
tag, (index == -1) ? ++positional_index : index);
|
||||
ASSIGN_OR_RETURN(contents, graph_->GetOutputSidePacket(name));
|
||||
MP_ASSIGN_OR_RETURN(contents, graph_->GetOutputSidePacket(name));
|
||||
}
|
||||
return absl::OkStatus();
|
||||
}
|
||||
|
|
|
@ -97,27 +97,27 @@
|
|||
//
|
||||
// Interface:
|
||||
//
|
||||
// ASSIGN_OR_RETURN(lhs, rexpr)
|
||||
// ASSIGN_OR_RETURN(lhs, rexpr, error_expression);
|
||||
// MP_ASSIGN_OR_RETURN(lhs, rexpr)
|
||||
// MP_ASSIGN_OR_RETURN(lhs, rexpr, error_expression);
|
||||
//
|
||||
// WARNING: expands into multiple statements; it cannot be used in a single
|
||||
// statement (e.g. as the body of an if statement without {})!
|
||||
//
|
||||
// Example: Declaring and initializing a new variable (ValueType can be anything
|
||||
// that can be initialized with assignment, including references):
|
||||
// ASSIGN_OR_RETURN(ValueType value, MaybeGetValue(arg));
|
||||
// MP_ASSIGN_OR_RETURN(ValueType value, MaybeGetValue(arg));
|
||||
//
|
||||
// Example: Assigning to an existing variable:
|
||||
// ValueType value;
|
||||
// ASSIGN_OR_RETURN(value, MaybeGetValue(arg));
|
||||
// MP_ASSIGN_OR_RETURN(value, MaybeGetValue(arg));
|
||||
//
|
||||
// Example: Assigning to an expression with side effects:
|
||||
// MyProto data;
|
||||
// ASSIGN_OR_RETURN(*data.mutable_str(), MaybeGetValue(arg));
|
||||
// MP_ASSIGN_OR_RETURN(*data.mutable_str(), MaybeGetValue(arg));
|
||||
// // No field "str" is added on error.
|
||||
//
|
||||
// Example: Assigning to a std::unique_ptr.
|
||||
// ASSIGN_OR_RETURN(std::unique_ptr<T> ptr, MaybeGetPtr(arg));
|
||||
// MP_ASSIGN_OR_RETURN(std::unique_ptr<T> ptr, MaybeGetPtr(arg));
|
||||
//
|
||||
// If passed, the `error_expression` is evaluated to produce the return
|
||||
// value. The expression may reference any variable visible in scope, as
|
||||
|
@ -128,16 +128,16 @@
|
|||
// returnable by the function, including (void). For example:
|
||||
//
|
||||
// Example: Adjusting the error message.
|
||||
// ASSIGN_OR_RETURN(ValueType value, MaybeGetValue(query),
|
||||
// MP_ASSIGN_OR_RETURN(ValueType value, MaybeGetValue(query),
|
||||
// _ << "while processing query " << query.DebugString());
|
||||
//
|
||||
// Example: Logging the error on failure.
|
||||
// ASSIGN_OR_RETURN(ValueType value, MaybeGetValue(query), _.LogError());
|
||||
// MP_ASSIGN_OR_RETURN(ValueType value, MaybeGetValue(query), _.LogError());
|
||||
//
|
||||
#define ASSIGN_OR_RETURN(...) \
|
||||
MP_STATUS_MACROS_IMPL_GET_VARIADIC_( \
|
||||
(__VA_ARGS__, MP_STATUS_MACROS_IMPL_ASSIGN_OR_RETURN_3_, \
|
||||
MP_STATUS_MACROS_IMPL_ASSIGN_OR_RETURN_2_)) \
|
||||
#define MP_ASSIGN_OR_RETURN(...) \
|
||||
MP_STATUS_MACROS_IMPL_GET_VARIADIC_( \
|
||||
(__VA_ARGS__, MP_STATUS_MACROS_IMPL_MP_ASSIGN_OR_RETURN_3_, \
|
||||
MP_STATUS_MACROS_IMPL_MP_ASSIGN_OR_RETURN_2_)) \
|
||||
(__VA_ARGS__)
|
||||
|
||||
// =================================================================
|
||||
|
@ -150,16 +150,16 @@
|
|||
#define MP_STATUS_MACROS_IMPL_GET_VARIADIC_(args) \
|
||||
MP_STATUS_MACROS_IMPL_GET_VARIADIC_HELPER_ args
|
||||
|
||||
#define MP_STATUS_MACROS_IMPL_ASSIGN_OR_RETURN_2_(lhs, rexpr) \
|
||||
MP_STATUS_MACROS_IMPL_ASSIGN_OR_RETURN_( \
|
||||
#define MP_STATUS_MACROS_IMPL_MP_ASSIGN_OR_RETURN_2_(lhs, rexpr) \
|
||||
MP_STATUS_MACROS_IMPL_MP_ASSIGN_OR_RETURN_( \
|
||||
MP_STATUS_MACROS_IMPL_CONCAT_(_status_or_value, __LINE__), lhs, rexpr, \
|
||||
return mediapipe::StatusBuilder( \
|
||||
std::move(MP_STATUS_MACROS_IMPL_CONCAT_(_status_or_value, __LINE__)) \
|
||||
.status(), \
|
||||
MEDIAPIPE_LOC))
|
||||
#define MP_STATUS_MACROS_IMPL_ASSIGN_OR_RETURN_3_(lhs, rexpr, \
|
||||
error_expression) \
|
||||
MP_STATUS_MACROS_IMPL_ASSIGN_OR_RETURN_( \
|
||||
#define MP_STATUS_MACROS_IMPL_MP_ASSIGN_OR_RETURN_3_(lhs, rexpr, \
|
||||
error_expression) \
|
||||
MP_STATUS_MACROS_IMPL_MP_ASSIGN_OR_RETURN_( \
|
||||
MP_STATUS_MACROS_IMPL_CONCAT_(_status_or_value, __LINE__), lhs, rexpr, \
|
||||
mediapipe::StatusBuilder _( \
|
||||
std::move(MP_STATUS_MACROS_IMPL_CONCAT_(_status_or_value, __LINE__)) \
|
||||
|
@ -167,12 +167,12 @@
|
|||
MEDIAPIPE_LOC); \
|
||||
(void)_; /* error_expression is allowed to not use this variable */ \
|
||||
return (error_expression))
|
||||
#define MP_STATUS_MACROS_IMPL_ASSIGN_OR_RETURN_(statusor, lhs, rexpr, \
|
||||
error_expression) \
|
||||
auto statusor = (rexpr); \
|
||||
if (ABSL_PREDICT_FALSE(!statusor.ok())) { \
|
||||
error_expression; \
|
||||
} \
|
||||
#define MP_STATUS_MACROS_IMPL_MP_ASSIGN_OR_RETURN_(statusor, lhs, rexpr, \
|
||||
error_expression) \
|
||||
auto statusor = (rexpr); \
|
||||
if (ABSL_PREDICT_FALSE(!statusor.ok())) { \
|
||||
error_expression; \
|
||||
} \
|
||||
lhs = std::move(statusor).value()
|
||||
|
||||
// Internal helper for concatenating macro values.
|
||||
|
|
|
@ -29,7 +29,7 @@ absl::Status InputSidePacketHandler::PrepareForRun(
|
|||
std::function<void(absl::Status)> error_callback) {
|
||||
int missing_input_side_packet_count;
|
||||
prev_input_side_packets_ = std::move(input_side_packets_);
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
input_side_packets_,
|
||||
tool::FillPacketSet(*input_side_packet_types, all_side_packets,
|
||||
&missing_input_side_packet_count));
|
||||
|
|
|
@ -28,8 +28,8 @@ class OutputStreamPoller {
|
|||
OutputStreamPoller(const OutputStreamPoller&) = delete;
|
||||
OutputStreamPoller& operator=(const OutputStreamPoller&) = delete;
|
||||
OutputStreamPoller(OutputStreamPoller&&) = default;
|
||||
// Move assignment needs to be explicitly defaulted to allow ASSIGN_OR_RETURN
|
||||
// on `StatusOr<OutputStreamPoller>`.
|
||||
// Move assignment needs to be explicitly defaulted to allow
|
||||
// MP_ASSIGN_OR_RETURN on `StatusOr<OutputStreamPoller>`.
|
||||
OutputStreamPoller& operator=(OutputStreamPoller&&) = default;
|
||||
|
||||
// Resets OutputStramPollerImpl and cleans the internal packet queue.
|
||||
|
|
|
@ -55,7 +55,7 @@ const HolderBase* GetHolder(const Packet& packet) {
|
|||
|
||||
absl::StatusOr<Packet> PacketFromDynamicProto(const std::string& type_name,
|
||||
const std::string& serialized) {
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto message_holder,
|
||||
packet_internal::MessageHolderRegistry::CreateByName(type_name));
|
||||
auto* message =
|
||||
|
|
|
@ -150,16 +150,16 @@ class Packet {
|
|||
// general recommendation is to avoid calling this function.
|
||||
//
|
||||
// Example usage:
|
||||
// ASSIGN_OR_RETURN(std::unique_ptr<Detection> detection,
|
||||
// MP_ASSIGN_OR_RETURN(std::unique_ptr<Detection> detection,
|
||||
// p.ConsumeOrCopy<Detection>());
|
||||
// // The unique_ptr type can be omitted with auto.
|
||||
// ASSIGN_OR_RETURN(auto detection, p.ConsumeOrCopy<Detection>());
|
||||
// If you would like to crash on failure (prefer ASSIGN_OR_RETURN):
|
||||
// MP_ASSIGN_OR_RETURN(auto detection, p.ConsumeOrCopy<Detection>());
|
||||
// If you would like to crash on failure (prefer MP_ASSIGN_OR_RETURN):
|
||||
// auto detection = p.ConsumeOrCopy<Detection>().value();
|
||||
// // In functions which do not return absl::Status use an adaptor
|
||||
// // function as the third argument to ASSIGN_OR_RETURN. In tests,
|
||||
// // function as the third argument to MP_ASSIGN_OR_RETURN. In tests,
|
||||
// // use an adaptor which returns void.
|
||||
// ASSIGN_OR_RETURN(auto detection, p.ConsumeOrCopy<Detection>(),
|
||||
// MP_ASSIGN_OR_RETURN(auto detection, p.ConsumeOrCopy<Detection>(),
|
||||
// _.With([](const absl::Status& status) {
|
||||
// MP_EXPECT_OK(status);
|
||||
// // Use CHECK_OK to crash and report a usable line
|
||||
|
|
|
@ -97,7 +97,7 @@ absl::Status Generate(const ValidatedGraphConfig& validated_graph,
|
|||
validated_graph.Config().packet_generator(generator_index);
|
||||
const auto& generator_name = generator_config.packet_generator();
|
||||
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto static_access,
|
||||
internal::StaticAccessToGeneratorRegistry::CreateByNameInNamespace(
|
||||
validated_graph.Package(), generator_name),
|
||||
|
|
|
@ -246,7 +246,7 @@ absl::Status GraphProfiler::Start(mediapipe::Executor* executor) {
|
|||
if (is_tracing_ && IsTraceIntervalEnabled(profiler_config_, tracer()) &&
|
||||
executor != nullptr) {
|
||||
// Inform the user via logging the path to the trace logs.
|
||||
ASSIGN_OR_RETURN(std::string trace_log_path, GetTraceLogPath());
|
||||
MP_ASSIGN_OR_RETURN(std::string trace_log_path, GetTraceLogPath());
|
||||
// Check that we can actually write to it.
|
||||
auto status =
|
||||
file::SetContents(absl::StrCat(trace_log_path, "trace_writing_check"),
|
||||
|
@ -655,7 +655,8 @@ absl::StatusOr<std::string> GraphProfiler::GetTraceLogPath() {
|
|||
"Trace log writing is disabled, unable to get trace_log_path.");
|
||||
}
|
||||
if (profiler_config_.trace_log_path().empty()) {
|
||||
ASSIGN_OR_RETURN(std::string directory_path, GetDefaultTraceLogDirectory());
|
||||
MP_ASSIGN_OR_RETURN(std::string directory_path,
|
||||
GetDefaultTraceLogDirectory());
|
||||
std::string trace_log_path =
|
||||
absl::StrCat(directory_path, "/", kDefaultLogFilePrefix);
|
||||
return trace_log_path;
|
||||
|
@ -705,7 +706,7 @@ absl::Status GraphProfiler::WriteProfile() {
|
|||
// Logging is disabled, so we can exit writing without error.
|
||||
return absl::OkStatus();
|
||||
}
|
||||
ASSIGN_OR_RETURN(std::string trace_log_path, GetTraceLogPath());
|
||||
MP_ASSIGN_OR_RETURN(std::string trace_log_path, GetTraceLogPath());
|
||||
int log_interval_count = GetLogIntervalCount(profiler_config_);
|
||||
int log_file_count = GetLogFileCount(profiler_config_);
|
||||
GraphProfile profile;
|
||||
|
|
|
@ -33,7 +33,7 @@ absl::StatusOr<std::string> GetLogDirectory() {
|
|||
}
|
||||
|
||||
absl::StatusOr<std::string> PathToLogFile(const std::string& path) {
|
||||
ASSIGN_OR_RETURN(std::string log_dir, GetLogDirectory());
|
||||
MP_ASSIGN_OR_RETURN(std::string log_dir, GetLogDirectory());
|
||||
std::string result = file::JoinPath(log_dir, path);
|
||||
MP_RETURN_IF_ERROR(
|
||||
mediapipe::file::RecursivelyCreateDir(file::Dirname(result)));
|
||||
|
|
|
@ -175,7 +175,8 @@ StatusOr<int> FindExtensionIndex(const FieldData& message_data,
|
|||
}
|
||||
std::string& extension_type = entry->extension_type;
|
||||
std::vector<FieldData> field_values;
|
||||
ASSIGN_OR_RETURN(field_values, GetFieldValues(message_data, *entry->field));
|
||||
MP_ASSIGN_OR_RETURN(field_values,
|
||||
GetFieldValues(message_data, *entry->field));
|
||||
for (int i = 0; i < field_values.size(); ++i) {
|
||||
FieldData extension = ParseProtobufAny(field_values[i]);
|
||||
if (extension_type == "*" ||
|
||||
|
@ -275,7 +276,7 @@ absl::Status FindExtension(const FieldData& message_data,
|
|||
}
|
||||
|
||||
// For repeated protobuf::Any, find the index for the extension_type.
|
||||
ASSIGN_OR_RETURN(int index, FindExtensionIndex(message_data, entry));
|
||||
MP_ASSIGN_OR_RETURN(int index, FindExtensionIndex(message_data, entry));
|
||||
if (index != -1) {
|
||||
entry->index = index;
|
||||
return absl::OkStatus();
|
||||
|
@ -367,7 +368,7 @@ absl::StatusOr<std::vector<FieldData>> GetFieldValues(
|
|||
MP_RETURN_IF_ERROR(FindExtension(message_data, &head));
|
||||
}
|
||||
RET_CHECK_NE(head.field, nullptr);
|
||||
ASSIGN_OR_RETURN(results, GetFieldValues(message_data, *head.field));
|
||||
MP_ASSIGN_OR_RETURN(results, GetFieldValues(message_data, *head.field));
|
||||
if (IsProtobufAny(head.field)) {
|
||||
for (int i = 0; i < results.size(); ++i) {
|
||||
results[i] = ParseProtobufAny(results[i]);
|
||||
|
@ -381,7 +382,7 @@ absl::StatusOr<std::vector<FieldData>> GetFieldValues(
|
|||
}
|
||||
if (!tail.empty()) {
|
||||
FieldData child = results.at(index);
|
||||
ASSIGN_OR_RETURN(results, GetFieldValues(child, tail));
|
||||
MP_ASSIGN_OR_RETURN(results, GetFieldValues(child, tail));
|
||||
} else if (index > -1) {
|
||||
FieldData child = results.at(index);
|
||||
results.clear();
|
||||
|
@ -394,7 +395,7 @@ absl::StatusOr<std::vector<FieldData>> GetFieldValues(
|
|||
absl::StatusOr<FieldData> GetField(const FieldData& message_data,
|
||||
const FieldPath& field_path) {
|
||||
std::vector<FieldData> results;
|
||||
ASSIGN_OR_RETURN(results, GetFieldValues(message_data, field_path));
|
||||
MP_ASSIGN_OR_RETURN(results, GetFieldValues(message_data, field_path));
|
||||
if (results.empty()) {
|
||||
FieldPathEntry tail = field_path.back();
|
||||
return absl::OutOfRangeError(absl::StrCat(
|
||||
|
@ -452,12 +453,12 @@ absl::Status MergeFieldValues(FieldData& message_data,
|
|||
: field_path.back().field->type();
|
||||
std::vector<FieldData> results = values;
|
||||
std::vector<FieldData> prevs;
|
||||
ASSIGN_OR_RETURN(prevs, GetFieldValues(message_data, field_path));
|
||||
MP_ASSIGN_OR_RETURN(prevs, GetFieldValues(message_data, field_path));
|
||||
if (field_type == FieldType::TYPE_MESSAGE) {
|
||||
for (int i = 0; i < std::min(values.size(), prevs.size()); ++i) {
|
||||
FieldData& v = results[i];
|
||||
FieldData& b = prevs[i];
|
||||
ASSIGN_OR_RETURN(v, MergeMessages(b, v));
|
||||
MP_ASSIGN_OR_RETURN(v, MergeMessages(b, v));
|
||||
}
|
||||
}
|
||||
status.Update(SetFieldValues(message_data, field_path, results));
|
||||
|
|
|
@ -88,11 +88,11 @@ absl::Status CopyLiteralOptions(CalculatorGraphConfig::Node parent_node,
|
|||
FieldData parent_options;
|
||||
ASSIGN_IF_OK(parent_options,
|
||||
GetNodeOptions(parent_data, graph_extension_type));
|
||||
ASSIGN_OR_RETURN(graph_options,
|
||||
MergeMessages(graph_options, parent_options));
|
||||
MP_ASSIGN_OR_RETURN(graph_options,
|
||||
MergeMessages(graph_options, parent_options));
|
||||
FieldData node_options;
|
||||
ASSIGN_OR_RETURN(node_options,
|
||||
GetNodeOptions(node_data, node_extension_type));
|
||||
MP_ASSIGN_OR_RETURN(node_options,
|
||||
GetNodeOptions(node_data, node_extension_type));
|
||||
if (!node_options.has_message_value() ||
|
||||
!graph_options.has_message_value()) {
|
||||
continue;
|
||||
|
@ -100,7 +100,8 @@ absl::Status CopyLiteralOptions(CalculatorGraphConfig::Node parent_node,
|
|||
FieldPath graph_path = GetPath(graph_tag, MessageType(graph_options));
|
||||
FieldPath node_path = GetPath(node_tag, MessageType(node_options));
|
||||
std::vector<FieldData> packet_data;
|
||||
ASSIGN_OR_RETURN(packet_data, GetFieldValues(graph_options, graph_path));
|
||||
MP_ASSIGN_OR_RETURN(packet_data,
|
||||
GetFieldValues(graph_options, graph_path));
|
||||
MP_RETURN_IF_ERROR(
|
||||
MergeFieldValues(node_options, node_path, packet_data));
|
||||
options_field_util::SetOptionsMessage(node_options, &node);
|
||||
|
|
|
@ -338,8 +338,8 @@ absl::Status Equals(std::vector<FieldData> b1, std::vector<FieldData> b2) {
|
|||
using tool::options_field_util::AsPacket;
|
||||
RET_CHECK_EQ(b1.size(), b2.size());
|
||||
for (int i = 0; i < b1.size(); ++i) {
|
||||
ASSIGN_OR_RETURN(Packet p1, AsPacket(b1.at(i)));
|
||||
ASSIGN_OR_RETURN(Packet p2, AsPacket(b2.at(i)));
|
||||
MP_ASSIGN_OR_RETURN(Packet p1, AsPacket(b1.at(i)));
|
||||
MP_ASSIGN_OR_RETURN(Packet p2, AsPacket(b2.at(i)));
|
||||
MP_RETURN_IF_ERROR(Equals(p1.Get<FieldType>(), p2.Get<FieldType>()));
|
||||
}
|
||||
return absl::OkStatus();
|
||||
|
|
|
@ -15,10 +15,10 @@ absl::Status PacketGeneratorWrapperCalculator::GetContract(
|
|||
CalculatorContract* cc) {
|
||||
const auto& options =
|
||||
cc->Options<::mediapipe::PacketGeneratorWrapperCalculatorOptions>();
|
||||
ASSIGN_OR_RETURN(auto static_access,
|
||||
mediapipe::internal::StaticAccessToGeneratorRegistry::
|
||||
CreateByNameInNamespace(options.package(),
|
||||
options.packet_generator()));
|
||||
MP_ASSIGN_OR_RETURN(auto static_access,
|
||||
mediapipe::internal::StaticAccessToGeneratorRegistry::
|
||||
CreateByNameInNamespace(options.package(),
|
||||
options.packet_generator()));
|
||||
MP_RETURN_IF_ERROR(static_access->FillExpectations(options.options(),
|
||||
&cc->InputSidePackets(),
|
||||
&cc->OutputSidePackets()))
|
||||
|
@ -30,10 +30,10 @@ absl::Status PacketGeneratorWrapperCalculator::GetContract(
|
|||
absl::Status PacketGeneratorWrapperCalculator::Open(CalculatorContext* cc) {
|
||||
const auto& options =
|
||||
cc->Options<::mediapipe::PacketGeneratorWrapperCalculatorOptions>();
|
||||
ASSIGN_OR_RETURN(auto static_access,
|
||||
mediapipe::internal::StaticAccessToGeneratorRegistry::
|
||||
CreateByNameInNamespace(options.package(),
|
||||
options.packet_generator()));
|
||||
MP_ASSIGN_OR_RETURN(auto static_access,
|
||||
mediapipe::internal::StaticAccessToGeneratorRegistry::
|
||||
CreateByNameInNamespace(options.package(),
|
||||
options.packet_generator()));
|
||||
mediapipe::PacketSet output_packets(cc->OutputSidePackets().TagMap());
|
||||
MP_RETURN_IF_ERROR(static_access->Generate(options.options(),
|
||||
cc->InputSidePackets(),
|
||||
|
|
|
@ -196,7 +196,7 @@ absl::Status ProtoUtilLite::ReplaceFieldRange(
|
|||
proto_path.erase(proto_path.begin());
|
||||
FieldType type =
|
||||
!proto_path.empty() ? WireFormatLite::TYPE_MESSAGE : field_type;
|
||||
ASSIGN_OR_RETURN(auto r, AccessField(entry, type, *message));
|
||||
MP_ASSIGN_OR_RETURN(auto r, AccessField(entry, type, *message));
|
||||
FieldAccess& access = r.first;
|
||||
int index = r.second;
|
||||
std::vector<FieldValue>& v = *access.mutable_field_values();
|
||||
|
@ -223,7 +223,7 @@ absl::Status ProtoUtilLite::GetFieldRange(
|
|||
proto_path.erase(proto_path.begin());
|
||||
FieldType type =
|
||||
!proto_path.empty() ? WireFormatLite::TYPE_MESSAGE : field_type;
|
||||
ASSIGN_OR_RETURN(auto r, AccessField(entry, type, message));
|
||||
MP_ASSIGN_OR_RETURN(auto r, AccessField(entry, type, message));
|
||||
FieldAccess& access = r.first;
|
||||
int index = r.second;
|
||||
std::vector<FieldValue>& v = *access.mutable_field_values();
|
||||
|
@ -252,7 +252,7 @@ absl::Status ProtoUtilLite::GetFieldCount(const FieldValue& message,
|
|||
proto_path.erase(proto_path.begin());
|
||||
FieldType type =
|
||||
!proto_path.empty() ? WireFormatLite::TYPE_MESSAGE : field_type;
|
||||
ASSIGN_OR_RETURN(auto r, AccessField(entry, type, message));
|
||||
MP_ASSIGN_OR_RETURN(auto r, AccessField(entry, type, message));
|
||||
FieldAccess& access = r.first;
|
||||
int index = r.second;
|
||||
std::vector<FieldValue>& v = *access.mutable_field_values();
|
||||
|
|
|
@ -62,8 +62,8 @@ absl::Status FindIgnoredStreams(
|
|||
const proto_ns::RepeatedPtrField<ProtoString>& src_streams,
|
||||
const proto_ns::RepeatedPtrField<ProtoString>& dst_streams,
|
||||
std::set<std::string>* result) {
|
||||
ASSIGN_OR_RETURN(auto src_map, tool::TagMap::Create(src_streams));
|
||||
ASSIGN_OR_RETURN(auto dst_map, tool::TagMap::Create(dst_streams));
|
||||
MP_ASSIGN_OR_RETURN(auto src_map, tool::TagMap::Create(src_streams));
|
||||
MP_ASSIGN_OR_RETURN(auto dst_map, tool::TagMap::Create(dst_streams));
|
||||
for (auto id = src_map->BeginId(); id < src_map->EndId(); ++id) {
|
||||
std::pair<std::string, int> tag_index = src_map->TagAndIndexFromId(id);
|
||||
if (!dst_map->GetId(tag_index.first, tag_index.second).IsValid()) {
|
||||
|
@ -149,8 +149,8 @@ absl::Status FindCorrespondingStreams(
|
|||
std::map<std::string, std::string>* stream_map,
|
||||
const proto_ns::RepeatedPtrField<ProtoString>& src_streams,
|
||||
const proto_ns::RepeatedPtrField<ProtoString>& dst_streams) {
|
||||
ASSIGN_OR_RETURN(auto src_map, tool::TagMap::Create(src_streams));
|
||||
ASSIGN_OR_RETURN(auto dst_map, tool::TagMap::Create(dst_streams));
|
||||
MP_ASSIGN_OR_RETURN(auto src_map, tool::TagMap::Create(src_streams));
|
||||
MP_ASSIGN_OR_RETURN(auto dst_map, tool::TagMap::Create(dst_streams));
|
||||
for (const auto& it : dst_map->Mapping()) {
|
||||
const std::string& tag = it.first;
|
||||
const TagMap::TagData* src_tag_data =
|
||||
|
@ -299,9 +299,10 @@ absl::Status ExpandSubgraphs(CalculatorGraphConfig* config,
|
|||
std::string node_name = CanonicalNodeName(*config, node_id);
|
||||
MP_RETURN_IF_ERROR(ValidateSubgraphFields(node));
|
||||
SubgraphContext subgraph_context(&node, service_manager);
|
||||
ASSIGN_OR_RETURN(auto subgraph, graph_registry->CreateByName(
|
||||
config->package(), node.calculator(),
|
||||
&subgraph_context));
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto subgraph,
|
||||
graph_registry->CreateByName(config->package(), node.calculator(),
|
||||
&subgraph_context));
|
||||
MP_RETURN_IF_ERROR(mediapipe::tool::DefineGraphOptions(node, &subgraph));
|
||||
MP_RETURN_IF_ERROR(PrefixNames(node_name, &subgraph));
|
||||
MP_RETURN_IF_ERROR(ConnectSubgraphStreams(node, &subgraph));
|
||||
|
|
|
@ -8,10 +8,10 @@ namespace mediapipe {
|
|||
absl::Status GraphProcessor::Initialize(CalculatorGraphConfig graph_config) {
|
||||
graph_config_ = graph_config;
|
||||
|
||||
ASSIGN_OR_RETURN(graph_input_map_,
|
||||
tool::TagMap::Create(graph_config_.input_stream()));
|
||||
ASSIGN_OR_RETURN(graph_output_map_,
|
||||
tool::TagMap::Create(graph_config_.output_stream()));
|
||||
MP_ASSIGN_OR_RETURN(graph_input_map_,
|
||||
tool::TagMap::Create(graph_config_.input_stream()));
|
||||
MP_ASSIGN_OR_RETURN(graph_output_map_,
|
||||
tool::TagMap::Create(graph_config_.output_stream()));
|
||||
return absl::OkStatus();
|
||||
}
|
||||
|
||||
|
|
|
@ -53,7 +53,7 @@ class TagMap {
|
|||
|
||||
// Create a TagMap from a repeated string proto field of TAG:<index>:name.
|
||||
// This is the most common usage:
|
||||
// ASSIGN_OR_RETURN(std::shared_ptr<TagMap> tag_map,
|
||||
// MP_ASSIGN_OR_RETURN(std::shared_ptr<TagMap> tag_map,
|
||||
// tool::TagMap::Create(node.input_streams()));
|
||||
static absl::StatusOr<std::shared_ptr<TagMap>> Create(
|
||||
const proto_ns::RepeatedPtrField<ProtoString>& tag_index_names) {
|
||||
|
|
|
@ -215,22 +215,24 @@ bool CompareImageFrames(const ImageFrame& image1, const ImageFrame& image2,
|
|||
absl::Status CompareAndSaveImageOutput(
|
||||
absl::string_view golden_image_path, const ImageFrame& actual,
|
||||
const ImageFrameComparisonOptions& options) {
|
||||
ASSIGN_OR_RETURN(auto output_img_path, SavePngTestOutput(actual, "output"));
|
||||
MP_ASSIGN_OR_RETURN(auto output_img_path,
|
||||
SavePngTestOutput(actual, "output"));
|
||||
|
||||
auto expected =
|
||||
LoadTestImage(GetTestFilePath(golden_image_path), ImageFormat::UNKNOWN);
|
||||
if (!expected.ok()) {
|
||||
return expected.status();
|
||||
}
|
||||
ASSIGN_OR_RETURN(auto expected_img_path,
|
||||
SavePngTestOutput(**expected, "expected"));
|
||||
MP_ASSIGN_OR_RETURN(auto expected_img_path,
|
||||
SavePngTestOutput(**expected, "expected"));
|
||||
|
||||
std::unique_ptr<ImageFrame> diff_img;
|
||||
auto status = CompareImageFrames(**expected, actual, options.max_color_diff,
|
||||
options.max_alpha_diff, options.max_avg_diff,
|
||||
diff_img);
|
||||
if (diff_img) {
|
||||
ASSIGN_OR_RETURN(auto diff_img_path, SavePngTestOutput(*diff_img, "diff"));
|
||||
MP_ASSIGN_OR_RETURN(auto diff_img_path,
|
||||
SavePngTestOutput(*diff_img, "diff"));
|
||||
}
|
||||
|
||||
return status;
|
||||
|
|
|
@ -46,7 +46,7 @@ absl::Status RunGeneratorFillExpectations(
|
|||
// side packet.
|
||||
PacketGeneratorConfig config = input_config;
|
||||
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto static_access,
|
||||
internal::StaticAccessToGeneratorRegistry::CreateByNameInNamespace(
|
||||
package, config.packet_generator()),
|
||||
|
@ -81,7 +81,7 @@ absl::Status RunGenerateAndValidateTypes(
|
|||
const std::string& package) {
|
||||
ABSL_CHECK(output_side_packets);
|
||||
// Get static access to functions.
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto static_access,
|
||||
internal::StaticAccessToGeneratorRegistry::CreateByNameInNamespace(
|
||||
package, packet_generator_name),
|
||||
|
|
|
@ -216,10 +216,11 @@ absl::Status NodeTypeInfo::Initialize(
|
|||
LegacyCalculatorSupport::Scoped<CalculatorContract> s(&contract_);
|
||||
// A number of calculators use the non-CC methods on GlCalculatorHelper
|
||||
// even though they are CalculatorBase-based.
|
||||
ASSIGN_OR_RETURN(auto calculator_factory,
|
||||
CalculatorBaseRegistry::CreateByNameInNamespace(
|
||||
validated_graph.Package(), node_class),
|
||||
_ << "Unable to find Calculator \"" << node_class << "\"");
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto calculator_factory,
|
||||
CalculatorBaseRegistry::CreateByNameInNamespace(validated_graph.Package(),
|
||||
node_class),
|
||||
_ << "Unable to find Calculator \"" << node_class << "\"");
|
||||
MP_RETURN_IF_ERROR(calculator_factory->GetContract(&contract_)).SetPrepend()
|
||||
<< node_class << ": ";
|
||||
|
||||
|
@ -261,7 +262,7 @@ absl::Status NodeTypeInfo::Initialize(
|
|||
|
||||
// Run FillExpectations.
|
||||
const std::string& node_class = node.packet_generator();
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto static_access,
|
||||
internal::StaticAccessToGeneratorRegistry::CreateByNameInNamespace(
|
||||
validated_graph.Package(), node_class),
|
||||
|
@ -302,7 +303,7 @@ absl::Status NodeTypeInfo::Initialize(
|
|||
|
||||
// Run FillExpectations.
|
||||
const std::string& node_class = node.status_handler();
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto static_access,
|
||||
internal::StaticAccessToStatusHandlerRegistry::CreateByNameInNamespace(
|
||||
validated_graph.Package(), node_class),
|
||||
|
@ -602,8 +603,8 @@ absl::Status ValidatedGraphConfig::AddOutputSidePacketsForNode(
|
|||
absl::Status ValidatedGraphConfig::InitializeStreamInfo(
|
||||
bool* need_sorting_ptr) {
|
||||
// Define output streams for graph input streams.
|
||||
ASSIGN_OR_RETURN(std::shared_ptr<tool::TagMap> graph_input_streams,
|
||||
tool::TagMap::Create(config_.input_stream()));
|
||||
MP_ASSIGN_OR_RETURN(std::shared_ptr<tool::TagMap> graph_input_streams,
|
||||
tool::TagMap::Create(config_.input_stream()));
|
||||
for (int index = 0; index < graph_input_streams->Names().size(); ++index) {
|
||||
std::string name = graph_input_streams->Names()[index];
|
||||
owned_packet_types_.emplace_back(new PacketType());
|
||||
|
|
|
@ -177,7 +177,7 @@ absl::Status GlContext::CreateContextInternal(EGLContext share_context,
|
|||
}
|
||||
|
||||
absl::Status GlContext::CreateContext(EGLContext share_context) {
|
||||
ASSIGN_OR_RETURN(display_, GetInitializedEglDisplay());
|
||||
MP_ASSIGN_OR_RETURN(display_, GetInitializedEglDisplay());
|
||||
|
||||
auto status = CreateContextInternal(share_context, 3);
|
||||
if (!status.ok()) {
|
||||
|
|
|
@ -74,7 +74,7 @@ GpuResources::StatusOrGpuResources GpuResources::Create() {
|
|||
|
||||
GpuResources::StatusOrGpuResources GpuResources::Create(
|
||||
PlatformGlContext external_context) {
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
std::shared_ptr<GlContext> context,
|
||||
GlContext::Create(external_context, kGlContextUseDedicatedThread));
|
||||
std::shared_ptr<GpuResources> gpu_resources(
|
||||
|
@ -150,8 +150,8 @@ absl::Status GpuResources::PrepareGpuNode(CalculatorNode* node) {
|
|||
#endif // !__EMSCRIPTEN__
|
||||
node_key_[node_id] = context_key;
|
||||
|
||||
ASSIGN_OR_RETURN(std::shared_ptr<GlContext> context,
|
||||
GetOrCreateGlContext(context_key));
|
||||
MP_ASSIGN_OR_RETURN(std::shared_ptr<GlContext> context,
|
||||
GetOrCreateGlContext(context_key));
|
||||
|
||||
if (kGlContextUseDedicatedThread) {
|
||||
std::string executor_name =
|
||||
|
@ -186,9 +186,9 @@ GlContext::StatusOrGlContext GpuResources::GetOrCreateGlContext(
|
|||
const std::string& key) {
|
||||
auto it = gl_key_context_.find(key);
|
||||
if (it == gl_key_context_.end()) {
|
||||
ASSIGN_OR_RETURN(std::shared_ptr<GlContext> new_context,
|
||||
GlContext::Create(*gl_key_context_[SharedContextKey()],
|
||||
kGlContextUseDedicatedThread));
|
||||
MP_ASSIGN_OR_RETURN(std::shared_ptr<GlContext> new_context,
|
||||
GlContext::Create(*gl_key_context_[SharedContextKey()],
|
||||
kGlContextUseDedicatedThread));
|
||||
it = gl_key_context_.emplace(key, new_context).first;
|
||||
#if MEDIAPIPE_GPU_BUFFER_USE_CV_PIXEL_BUFFER
|
||||
texture_caches_->RegisterTextureCache(it->second->cv_texture_cache());
|
||||
|
|
|
@ -589,9 +589,9 @@ absl::Status Graph::SetParentGlContext(int64_t java_gl_context) {
|
|||
"trying to set the parent GL context, but the gpu shared "
|
||||
"data has already been set up.");
|
||||
}
|
||||
ASSIGN_OR_RETURN(gpu_resources_,
|
||||
mediapipe::GpuResources::Create(
|
||||
reinterpret_cast<EGLContext>(java_gl_context)));
|
||||
MP_ASSIGN_OR_RETURN(gpu_resources_,
|
||||
mediapipe::GpuResources::Create(
|
||||
reinterpret_cast<EGLContext>(java_gl_context)));
|
||||
#endif // MEDIAPIPE_DISABLE_GPU
|
||||
return absl::OkStatus();
|
||||
}
|
||||
|
|
|
@ -119,22 +119,22 @@ class EffectRendererCalculator : public CalculatorBase {
|
|||
|
||||
absl::optional<face_geometry::Mesh3d> effect_mesh_3d;
|
||||
if (options.has_effect_mesh_3d_path()) {
|
||||
ASSIGN_OR_RETURN(effect_mesh_3d,
|
||||
ReadMesh3dFromFile(options.effect_mesh_3d_path()),
|
||||
_ << "Failed to read the effect 3D mesh from file!");
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
effect_mesh_3d, ReadMesh3dFromFile(options.effect_mesh_3d_path()),
|
||||
_ << "Failed to read the effect 3D mesh from file!");
|
||||
|
||||
MP_RETURN_IF_ERROR(face_geometry::ValidateMesh3d(*effect_mesh_3d))
|
||||
<< "Invalid effect 3D mesh!";
|
||||
}
|
||||
|
||||
ASSIGN_OR_RETURN(ImageFrame effect_texture,
|
||||
ReadTextureFromFile(options.effect_texture_path()),
|
||||
_ << "Failed to read the effect texture from file!");
|
||||
MP_ASSIGN_OR_RETURN(ImageFrame effect_texture,
|
||||
ReadTextureFromFile(options.effect_texture_path()),
|
||||
_ << "Failed to read the effect texture from file!");
|
||||
|
||||
ASSIGN_OR_RETURN(effect_renderer_,
|
||||
CreateEffectRenderer(environment, effect_mesh_3d,
|
||||
std::move(effect_texture)),
|
||||
_ << "Failed to create the effect renderer!");
|
||||
MP_ASSIGN_OR_RETURN(effect_renderer_,
|
||||
CreateEffectRenderer(environment, effect_mesh_3d,
|
||||
std::move(effect_texture)),
|
||||
_ << "Failed to create the effect renderer!");
|
||||
|
||||
return absl::OkStatus();
|
||||
});
|
||||
|
@ -202,9 +202,9 @@ class EffectRendererCalculator : public CalculatorBase {
|
|||
private:
|
||||
static absl::StatusOr<ImageFrame> ReadTextureFromFile(
|
||||
const std::string& texture_path) {
|
||||
ASSIGN_OR_RETURN(std::string texture_blob,
|
||||
ReadContentBlobFromFile(texture_path),
|
||||
_ << "Failed to read texture blob from file!");
|
||||
MP_ASSIGN_OR_RETURN(std::string texture_blob,
|
||||
ReadContentBlobFromFile(texture_path),
|
||||
_ << "Failed to read texture blob from file!");
|
||||
|
||||
// Use OpenCV image decoding functionality to finish reading the texture.
|
||||
std::vector<char> texture_blob_vector(texture_blob.begin(),
|
||||
|
@ -246,9 +246,9 @@ class EffectRendererCalculator : public CalculatorBase {
|
|||
|
||||
static absl::StatusOr<face_geometry::Mesh3d> ReadMesh3dFromFile(
|
||||
const std::string& mesh_3d_path) {
|
||||
ASSIGN_OR_RETURN(std::string mesh_3d_blob,
|
||||
ReadContentBlobFromFile(mesh_3d_path),
|
||||
_ << "Failed to read mesh 3D blob from file!");
|
||||
MP_ASSIGN_OR_RETURN(std::string mesh_3d_blob,
|
||||
ReadContentBlobFromFile(mesh_3d_path),
|
||||
_ << "Failed to read mesh 3D blob from file!");
|
||||
|
||||
face_geometry::Mesh3d mesh_3d;
|
||||
RET_CHECK(mesh_3d.ParseFromString(mesh_3d_blob))
|
||||
|
@ -259,9 +259,10 @@ class EffectRendererCalculator : public CalculatorBase {
|
|||
|
||||
static absl::StatusOr<std::string> ReadContentBlobFromFile(
|
||||
const std::string& unresolved_path) {
|
||||
ASSIGN_OR_RETURN(std::string resolved_path,
|
||||
mediapipe::PathToResourceAsFile(unresolved_path),
|
||||
_ << "Failed to resolve path! Path = " << unresolved_path);
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
std::string resolved_path,
|
||||
mediapipe::PathToResourceAsFile(unresolved_path),
|
||||
_ << "Failed to resolve path! Path = " << unresolved_path);
|
||||
|
||||
std::string content_blob;
|
||||
MP_RETURN_IF_ERROR(
|
||||
|
|
|
@ -92,7 +92,7 @@ class GeometryPipelineCalculator : public CalculatorBase {
|
|||
|
||||
const auto& options = cc->Options<FaceGeometryPipelineCalculatorOptions>();
|
||||
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
face_geometry::GeometryPipelineMetadata metadata,
|
||||
ReadMetadataFromFile(options.metadata_path()),
|
||||
_ << "Failed to read the geometry pipeline metadata from file!");
|
||||
|
@ -109,7 +109,7 @@ class GeometryPipelineCalculator : public CalculatorBase {
|
|||
MP_RETURN_IF_ERROR(face_geometry::ValidateEnvironment(environment))
|
||||
<< "Invalid environment!";
|
||||
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
geometry_pipeline_,
|
||||
face_geometry::CreateGeometryPipeline(environment, metadata),
|
||||
_ << "Failed to create a geometry pipeline!");
|
||||
|
@ -136,7 +136,7 @@ class GeometryPipelineCalculator : public CalculatorBase {
|
|||
auto multi_face_geometry =
|
||||
absl::make_unique<std::vector<face_geometry::FaceGeometry>>();
|
||||
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
*multi_face_geometry,
|
||||
geometry_pipeline_->EstimateFaceGeometry(
|
||||
multi_face_landmarks, //
|
||||
|
@ -160,9 +160,9 @@ class GeometryPipelineCalculator : public CalculatorBase {
|
|||
private:
|
||||
static absl::StatusOr<face_geometry::GeometryPipelineMetadata>
|
||||
ReadMetadataFromFile(const std::string& metadata_path) {
|
||||
ASSIGN_OR_RETURN(std::string metadata_blob,
|
||||
ReadContentBlobFromFile(metadata_path),
|
||||
_ << "Failed to read a metadata blob from file!");
|
||||
MP_ASSIGN_OR_RETURN(std::string metadata_blob,
|
||||
ReadContentBlobFromFile(metadata_path),
|
||||
_ << "Failed to read a metadata blob from file!");
|
||||
|
||||
face_geometry::GeometryPipelineMetadata metadata;
|
||||
RET_CHECK(metadata.ParseFromString(metadata_blob))
|
||||
|
@ -173,9 +173,10 @@ class GeometryPipelineCalculator : public CalculatorBase {
|
|||
|
||||
static absl::StatusOr<std::string> ReadContentBlobFromFile(
|
||||
const std::string& unresolved_path) {
|
||||
ASSIGN_OR_RETURN(std::string resolved_path,
|
||||
mediapipe::PathToResourceAsFile(unresolved_path),
|
||||
_ << "Failed to resolve path! Path = " << unresolved_path);
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
std::string resolved_path,
|
||||
mediapipe::PathToResourceAsFile(unresolved_path),
|
||||
_ << "Failed to resolve path! Path = " << unresolved_path);
|
||||
|
||||
std::string content_blob;
|
||||
MP_RETURN_IF_ERROR(
|
||||
|
|
|
@ -48,19 +48,19 @@ struct RenderableMesh3d {
|
|||
|
||||
RenderableMesh3d renderable_mesh_3d;
|
||||
renderable_mesh_3d.vertex_size = GetVertexSize(vertex_type);
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
renderable_mesh_3d.vertex_position_size,
|
||||
GetVertexComponentSize(vertex_type, VertexComponent::POSITION),
|
||||
_ << "Failed to get the position vertex size!");
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
renderable_mesh_3d.tex_coord_position_size,
|
||||
GetVertexComponentSize(vertex_type, VertexComponent::TEX_COORD),
|
||||
_ << "Failed to get the tex coord vertex size!");
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
renderable_mesh_3d.vertex_position_offset,
|
||||
GetVertexComponentOffset(vertex_type, VertexComponent::POSITION),
|
||||
_ << "Failed to get the position vertex offset!");
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
renderable_mesh_3d.tex_coord_position_offset,
|
||||
GetVertexComponentOffset(vertex_type, VertexComponent::TEX_COORD),
|
||||
_ << "Failed to get the tex coord vertex offset!");
|
||||
|
@ -473,12 +473,12 @@ class EffectRendererImpl : public EffectRenderer {
|
|||
}
|
||||
|
||||
// Wrap both source and destination textures.
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
std::unique_ptr<Texture> src_texture,
|
||||
Texture::WrapExternalTexture(src_texture_name, src_texture_target,
|
||||
frame_width, frame_height),
|
||||
_ << "Failed to wrap the external source texture");
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
std::unique_ptr<Texture> dst_texture,
|
||||
Texture::WrapExternalTexture(dst_texture_name, dst_texture_target,
|
||||
frame_width, frame_height),
|
||||
|
@ -506,14 +506,14 @@ class EffectRendererImpl : public EffectRenderer {
|
|||
const FaceGeometry& face_geometry = multi_face_geometry[i];
|
||||
|
||||
// Extract the face pose transformation matrix.
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
face_pose_transform_matrices[i],
|
||||
Convert4x4MatrixDataToArrayFormat(
|
||||
face_geometry.pose_transform_matrix()),
|
||||
_ << "Failed to extract the face pose transformation matrix!");
|
||||
|
||||
// Extract the face mesh as a renderable.
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
renderable_face_meshes[i],
|
||||
RenderableMesh3d::CreateFromProtoMesh3d(face_geometry.mesh()),
|
||||
_ << "Failed to extract a renderable face mesh!");
|
||||
|
@ -699,26 +699,28 @@ absl::StatusOr<std::unique_ptr<EffectRenderer>> CreateEffectRenderer(
|
|||
<< "Invalid effect 3D mesh!";
|
||||
}
|
||||
|
||||
ASSIGN_OR_RETURN(std::unique_ptr<RenderTarget> render_target,
|
||||
RenderTarget::Create(),
|
||||
_ << "Failed to create a render target!");
|
||||
ASSIGN_OR_RETURN(std::unique_ptr<Renderer> renderer, Renderer::Create(),
|
||||
_ << "Failed to create a renderer!");
|
||||
ASSIGN_OR_RETURN(RenderableMesh3d renderable_quad_mesh_3d,
|
||||
RenderableMesh3d::CreateFromProtoMesh3d(CreateQuadMesh3d()),
|
||||
_ << "Failed to create a renderable quad mesh!");
|
||||
MP_ASSIGN_OR_RETURN(std::unique_ptr<RenderTarget> render_target,
|
||||
RenderTarget::Create(),
|
||||
_ << "Failed to create a render target!");
|
||||
MP_ASSIGN_OR_RETURN(std::unique_ptr<Renderer> renderer, Renderer::Create(),
|
||||
_ << "Failed to create a renderer!");
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
RenderableMesh3d renderable_quad_mesh_3d,
|
||||
RenderableMesh3d::CreateFromProtoMesh3d(CreateQuadMesh3d()),
|
||||
_ << "Failed to create a renderable quad mesh!");
|
||||
absl::optional<RenderableMesh3d> renderable_effect_mesh_3d;
|
||||
if (effect_mesh_3d) {
|
||||
ASSIGN_OR_RETURN(renderable_effect_mesh_3d,
|
||||
RenderableMesh3d::CreateFromProtoMesh3d(*effect_mesh_3d),
|
||||
_ << "Failed to create a renderable effect mesh!");
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
renderable_effect_mesh_3d,
|
||||
RenderableMesh3d::CreateFromProtoMesh3d(*effect_mesh_3d),
|
||||
_ << "Failed to create a renderable effect mesh!");
|
||||
}
|
||||
ASSIGN_OR_RETURN(std::unique_ptr<Texture> empty_color_gl_texture,
|
||||
Texture::CreateFromImageFrame(CreateEmptyColorTexture()),
|
||||
_ << "Failed to create an empty color texture!");
|
||||
ASSIGN_OR_RETURN(std::unique_ptr<Texture> effect_gl_texture,
|
||||
Texture::CreateFromImageFrame(effect_texture),
|
||||
_ << "Failed to create an effect texture!");
|
||||
MP_ASSIGN_OR_RETURN(std::unique_ptr<Texture> empty_color_gl_texture,
|
||||
Texture::CreateFromImageFrame(CreateEmptyColorTexture()),
|
||||
_ << "Failed to create an empty color texture!");
|
||||
MP_ASSIGN_OR_RETURN(std::unique_ptr<Texture> effect_gl_texture,
|
||||
Texture::CreateFromImageFrame(effect_texture),
|
||||
_ << "Failed to create an effect texture!");
|
||||
|
||||
std::unique_ptr<EffectRenderer> result =
|
||||
absl::make_unique<EffectRendererImpl>(
|
||||
|
|
|
@ -142,9 +142,9 @@ class ScreenToMetricSpaceConverter {
|
|||
Eigen::Matrix3Xf intermediate_landmarks(screen_landmarks);
|
||||
ChangeHandedness(intermediate_landmarks);
|
||||
|
||||
ASSIGN_OR_RETURN(const float first_iteration_scale,
|
||||
EstimateScale(intermediate_landmarks),
|
||||
_ << "Failed to estimate first iteration scale!");
|
||||
MP_ASSIGN_OR_RETURN(const float first_iteration_scale,
|
||||
EstimateScale(intermediate_landmarks),
|
||||
_ << "Failed to estimate first iteration scale!");
|
||||
|
||||
// 2nd iteration: unproject XY using the scale from the 1st iteration.
|
||||
intermediate_landmarks = screen_landmarks;
|
||||
|
@ -167,9 +167,9 @@ class ScreenToMetricSpaceConverter {
|
|||
canonical_metric_landmarks_.colwise().homogeneous())
|
||||
.row(2);
|
||||
}
|
||||
ASSIGN_OR_RETURN(const float second_iteration_scale,
|
||||
EstimateScale(intermediate_landmarks),
|
||||
_ << "Failed to estimate second iteration scale!");
|
||||
MP_ASSIGN_OR_RETURN(const float second_iteration_scale,
|
||||
EstimateScale(intermediate_landmarks),
|
||||
_ << "Failed to estimate second iteration scale!");
|
||||
|
||||
// Use the total scale to unproject the screen landmarks.
|
||||
const float total_scale = first_iteration_scale * second_iteration_scale;
|
||||
|
|
|
@ -172,7 +172,7 @@ class FloatPrecisionProcrustesSolver : public ProcrustesSolver {
|
|||
MP_RETURN_IF_ERROR(ComputeOptimalRotation(
|
||||
weighted_targets * centered_weighted_sources.transpose(), rotation))
|
||||
<< "Failed to compute the optimal rotation!";
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
float scale,
|
||||
ComputeOptimalScale(centered_weighted_sources, weighted_sources,
|
||||
weighted_targets, rotation),
|
||||
|
|
|
@ -315,8 +315,8 @@ absl::Status CreateCVPixelBufferForImageFramePacket(
|
|||
auto image_frame = std::const_pointer_cast<mediapipe::ImageFrame>(
|
||||
mediapipe::SharedPtrWithPacket<mediapipe::ImageFrame>(
|
||||
image_frame_packet));
|
||||
ASSIGN_OR_RETURN(*out_buffer, CreateCVPixelBufferForImageFrame(
|
||||
image_frame, can_overwrite));
|
||||
MP_ASSIGN_OR_RETURN(*out_buffer, CreateCVPixelBufferForImageFrame(
|
||||
image_frame, can_overwrite));
|
||||
return absl::OkStatus();
|
||||
}
|
||||
|
||||
|
@ -339,9 +339,9 @@ absl::StatusOr<CFHolder<CVPixelBufferRef>> CreateCVPixelBufferForImageFrame(
|
|||
if (can_overwrite) {
|
||||
v_dest = v_image;
|
||||
} else {
|
||||
ASSIGN_OR_RETURN(pixel_buffer,
|
||||
CreateCVPixelBufferWithoutPool(
|
||||
frame.Width(), frame.Height(), pixel_format));
|
||||
MP_ASSIGN_OR_RETURN(pixel_buffer,
|
||||
CreateCVPixelBufferWithoutPool(
|
||||
frame.Width(), frame.Height(), pixel_format));
|
||||
status = CVPixelBufferLockBaseAddress(*pixel_buffer,
|
||||
kCVPixelBufferLock_ReadOnly);
|
||||
RET_CHECK(status == kCVReturnSuccess)
|
||||
|
@ -456,9 +456,9 @@ absl::StatusOr<CFHolder<CVPixelBufferRef>> CreateCVPixelBufferCopyingImageFrame(
|
|||
}
|
||||
|
||||
CVReturn cv_err;
|
||||
ASSIGN_OR_RETURN(pixel_buffer, CreateCVPixelBufferWithoutPool(
|
||||
image_frame.Width(), image_frame.Height(),
|
||||
pixel_format));
|
||||
MP_ASSIGN_OR_RETURN(pixel_buffer, CreateCVPixelBufferWithoutPool(
|
||||
image_frame.Width(),
|
||||
image_frame.Height(), pixel_format));
|
||||
cv_err =
|
||||
CVPixelBufferLockBaseAddress(*pixel_buffer, kCVPixelBufferLock_ReadOnly);
|
||||
RET_CHECK(cv_err == kCVReturnSuccess)
|
||||
|
|
|
@ -91,7 +91,7 @@ absl::StatusOr<AudioTensorSpecs> BuildPreprocessingSpecs(
|
|||
}
|
||||
const auto* input_tensor =
|
||||
(*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]];
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
const auto* audio_tensor_metadata,
|
||||
GetAudioTensorMetadataIfAny(*model_resources.GetMetadataExtractor(), 0));
|
||||
return BuildInputAudioTensorSpecs(*input_tensor, audio_tensor_metadata);
|
||||
|
@ -154,11 +154,11 @@ class AudioClassifierGraph : public core::ModelTaskGraph {
|
|||
public:
|
||||
absl::StatusOr<CalculatorGraphConfig> GetConfig(
|
||||
SubgraphContext* sc) override {
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
const auto* model_resources,
|
||||
CreateModelResources<proto::AudioClassifierGraphOptions>(sc));
|
||||
Graph graph;
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto output_streams,
|
||||
BuildAudioClassificationTask(
|
||||
sc->Options<proto::AudioClassifierGraphOptions>(), *model_resources,
|
||||
|
@ -202,8 +202,8 @@ class AudioClassifierGraph : public core::ModelTaskGraph {
|
|||
}
|
||||
|
||||
// Adds AudioToTensorCalculator and connects it to the graph input streams.
|
||||
ASSIGN_OR_RETURN(auto audio_tensor_specs,
|
||||
BuildPreprocessingSpecs(model_resources));
|
||||
MP_ASSIGN_OR_RETURN(auto audio_tensor_specs,
|
||||
BuildPreprocessingSpecs(model_resources));
|
||||
auto& audio_to_tensor = graph.AddNode("AudioToTensorCalculator");
|
||||
ConfigureAudioToTensorCalculator(
|
||||
audio_tensor_specs, use_stream_mode,
|
||||
|
|
|
@ -83,7 +83,7 @@ absl::StatusOr<AudioTensorSpecs> BuildPreprocessingSpecs(
|
|||
}
|
||||
const auto* input_tensor =
|
||||
(*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]];
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
const auto* audio_tensor_metadata,
|
||||
GetAudioTensorMetadataIfAny(*model_resources.GetMetadataExtractor(), 0));
|
||||
return BuildInputAudioTensorSpecs(*input_tensor, audio_tensor_metadata);
|
||||
|
@ -144,11 +144,11 @@ class AudioEmbedderGraph : public core::ModelTaskGraph {
|
|||
public:
|
||||
absl::StatusOr<CalculatorGraphConfig> GetConfig(
|
||||
SubgraphContext* sc) override {
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
const auto* model_resources,
|
||||
CreateModelResources<proto::AudioEmbedderGraphOptions>(sc));
|
||||
Graph graph;
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto output_streams,
|
||||
BuildAudioEmbeddingTask(
|
||||
sc->Options<proto::AudioEmbedderGraphOptions>(), *model_resources,
|
||||
|
@ -178,8 +178,8 @@ class AudioEmbedderGraph : public core::ModelTaskGraph {
|
|||
MediaPipeTasksStatus::kMetadataNotFoundError);
|
||||
}
|
||||
// Adds AudioToTensorCalculator and connects it to the graph input streams.
|
||||
ASSIGN_OR_RETURN(auto audio_tensor_specs,
|
||||
BuildPreprocessingSpecs(model_resources));
|
||||
MP_ASSIGN_OR_RETURN(auto audio_tensor_specs,
|
||||
BuildPreprocessingSpecs(model_resources));
|
||||
auto& audio_to_tensor = graph.AddNode("AudioToTensorCalculator");
|
||||
ConfigureAudioToTensorCalculator(
|
||||
audio_tensor_specs, use_stream_mode,
|
||||
|
|
|
@ -81,10 +81,10 @@ class AudioTaskApiFactory {
|
|||
"callback shouldn't be provided.",
|
||||
MediaPipeTasksStatus::kInvalidTaskGraphConfigError);
|
||||
}
|
||||
ASSIGN_OR_RETURN(auto runner,
|
||||
tasks::core::TaskRunner::Create(
|
||||
std::move(graph_config), std::move(resolver),
|
||||
std::move(packets_callback)));
|
||||
MP_ASSIGN_OR_RETURN(auto runner,
|
||||
tasks::core::TaskRunner::Create(
|
||||
std::move(graph_config), std::move(resolver),
|
||||
std::move(packets_callback)));
|
||||
return std::make_unique<T>(std::move(runner), running_mode);
|
||||
}
|
||||
};
|
||||
|
|
|
@ -107,8 +107,8 @@ absl::StatusOr<AudioTensorSpecs> BuildInputAudioTensorSpecs(
|
|||
MediaPipeTasksStatus::kMetadataNotFoundError);
|
||||
}
|
||||
|
||||
ASSIGN_OR_RETURN(const AudioProperties* props,
|
||||
GetAudioPropertiesIfAny(*audio_tensor_metadata));
|
||||
MP_ASSIGN_OR_RETURN(const AudioProperties* props,
|
||||
GetAudioPropertiesIfAny(*audio_tensor_metadata));
|
||||
// Input-related specifications.
|
||||
int tensor_shape_size = audio_tensor.shape()->size();
|
||||
if (tensor_shape_size > 2) {
|
||||
|
|
|
@ -98,13 +98,13 @@ class ClassificationAggregationCalculatorTest : public tflite::testing::Test {
|
|||
|
||||
MP_RETURN_IF_ERROR(calculator_graph_.Initialize(graph.GetConfig()));
|
||||
if (connect_timestamps) {
|
||||
ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kTimestampedClassificationsName));
|
||||
MP_ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kTimestampedClassificationsName));
|
||||
MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{}));
|
||||
return poller;
|
||||
}
|
||||
ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kClassificationsName));
|
||||
MP_ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kClassificationsName));
|
||||
MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{}));
|
||||
return poller;
|
||||
}
|
||||
|
|
|
@ -74,13 +74,13 @@ class EmbeddingAggregationCalculatorTest : public tflite::testing::Test {
|
|||
|
||||
MP_RETURN_IF_ERROR(calculator_graph_.Initialize(graph.GetConfig()));
|
||||
if (connect_timestamps) {
|
||||
ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kTimestampedEmbeddingsName));
|
||||
MP_ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kTimestampedEmbeddingsName));
|
||||
MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{}));
|
||||
return poller;
|
||||
}
|
||||
ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kEmbeddingsOutName));
|
||||
MP_ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kEmbeddingsOutName));
|
||||
MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{}));
|
||||
return poller;
|
||||
}
|
||||
|
|
|
@ -178,9 +178,9 @@ absl::Status ScoreCalibrationCalculator::Process(CalculatorContext* cc) {
|
|||
for (int i = 0; i < num_scores; ++i) {
|
||||
// Use the "safe" flavor as we need to check that the externally provided
|
||||
// indices are not out-of-bounds.
|
||||
ASSIGN_OR_RETURN(raw_calibrated_scores[i],
|
||||
SafeComputeCalibratedScore(
|
||||
static_cast<int>(raw_indices[i]), raw_scores[i]));
|
||||
MP_ASSIGN_OR_RETURN(raw_calibrated_scores[i],
|
||||
SafeComputeCalibratedScore(
|
||||
static_cast<int>(raw_indices[i]), raw_scores[i]));
|
||||
}
|
||||
} else {
|
||||
if (num_scores != options_.sigmoids_size()) {
|
||||
|
|
|
@ -180,16 +180,17 @@ absl::StatusOr<LabelItems> GetLabelItemsIfAny(
|
|||
LabelItems empty_label_items;
|
||||
return empty_label_items;
|
||||
}
|
||||
ASSIGN_OR_RETURN(absl::string_view labels_file,
|
||||
metadata_extractor.GetAssociatedFile(labels_filename));
|
||||
MP_ASSIGN_OR_RETURN(absl::string_view labels_file,
|
||||
metadata_extractor.GetAssociatedFile(labels_filename));
|
||||
const std::string display_names_filename =
|
||||
ModelMetadataExtractor::FindFirstAssociatedFileName(
|
||||
tensor_metadata, tflite::AssociatedFileType_TENSOR_AXIS_LABELS,
|
||||
locale);
|
||||
absl::string_view display_names_file;
|
||||
if (!display_names_filename.empty()) {
|
||||
ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
|
||||
display_names_filename));
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
display_names_file,
|
||||
metadata_extractor.GetAssociatedFile(display_names_filename));
|
||||
}
|
||||
return mediapipe::BuildLabelMapFromFiles(labels_file, display_names_file);
|
||||
}
|
||||
|
@ -199,10 +200,10 @@ absl::StatusOr<LabelItems> GetLabelItemsIfAny(
|
|||
absl::StatusOr<float> GetScoreThreshold(
|
||||
const ModelMetadataExtractor& metadata_extractor,
|
||||
const TensorMetadata& tensor_metadata) {
|
||||
ASSIGN_OR_RETURN(const ProcessUnit* score_thresholding_process_unit,
|
||||
metadata_extractor.FindFirstProcessUnit(
|
||||
tensor_metadata,
|
||||
tflite::ProcessUnitOptions_ScoreThresholdingOptions));
|
||||
MP_ASSIGN_OR_RETURN(const ProcessUnit* score_thresholding_process_unit,
|
||||
metadata_extractor.FindFirstProcessUnit(
|
||||
tensor_metadata,
|
||||
tflite::ProcessUnitOptions_ScoreThresholdingOptions));
|
||||
if (score_thresholding_process_unit == nullptr) {
|
||||
return kDefaultScoreThreshold;
|
||||
}
|
||||
|
@ -255,10 +256,10 @@ absl::Status ConfigureScoreCalibrationIfAny(
|
|||
return absl::OkStatus();
|
||||
}
|
||||
// Get ScoreCalibrationOptions, if any.
|
||||
ASSIGN_OR_RETURN(const ProcessUnit* score_calibration_process_unit,
|
||||
metadata_extractor.FindFirstProcessUnit(
|
||||
*tensor_metadata,
|
||||
tflite::ProcessUnitOptions_ScoreCalibrationOptions));
|
||||
MP_ASSIGN_OR_RETURN(const ProcessUnit* score_calibration_process_unit,
|
||||
metadata_extractor.FindFirstProcessUnit(
|
||||
*tensor_metadata,
|
||||
tflite::ProcessUnitOptions_ScoreCalibrationOptions));
|
||||
if (score_calibration_process_unit == nullptr) {
|
||||
return absl::OkStatus();
|
||||
}
|
||||
|
@ -276,7 +277,7 @@ absl::Status ConfigureScoreCalibrationIfAny(
|
|||
"parameters file with type TENSOR_AXIS_SCORE_CALIBRATION.",
|
||||
MediaPipeTasksStatus::kMetadataAssociatedFileNotFoundError);
|
||||
}
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
absl::string_view score_calibration_file,
|
||||
metadata_extractor.GetAssociatedFile(score_calibration_filename));
|
||||
ScoreCalibrationCalculatorOptions calculator_options;
|
||||
|
@ -317,15 +318,15 @@ absl::Status ConfigureTensorsToClassificationCalculator(
|
|||
LabelItems label_items;
|
||||
float score_threshold = kDefaultScoreThreshold;
|
||||
if (tensor_metadata != nullptr) {
|
||||
ASSIGN_OR_RETURN(label_items,
|
||||
GetLabelItemsIfAny(metadata_extractor, *tensor_metadata,
|
||||
options.display_names_locale()));
|
||||
ASSIGN_OR_RETURN(score_threshold,
|
||||
GetScoreThreshold(metadata_extractor, *tensor_metadata));
|
||||
MP_ASSIGN_OR_RETURN(label_items,
|
||||
GetLabelItemsIfAny(metadata_extractor, *tensor_metadata,
|
||||
options.display_names_locale()));
|
||||
MP_ASSIGN_OR_RETURN(score_threshold, GetScoreThreshold(metadata_extractor,
|
||||
*tensor_metadata));
|
||||
}
|
||||
// Allowlist / denylist.
|
||||
ASSIGN_OR_RETURN(auto allow_or_deny_categories,
|
||||
GetAllowOrDenyCategoryIndicesIfAny(options, label_items));
|
||||
MP_ASSIGN_OR_RETURN(auto allow_or_deny_categories,
|
||||
GetAllowOrDenyCategoryIndicesIfAny(options, label_items));
|
||||
if (!allow_or_deny_categories.empty()) {
|
||||
if (options.category_allowlist_size()) {
|
||||
calculator_options->mutable_allow_classes()->Assign(
|
||||
|
@ -359,8 +360,8 @@ absl::Status ConfigureClassificationPostprocessingGraph(
|
|||
const proto::ClassifierOptions& classifier_options,
|
||||
proto::ClassificationPostprocessingGraphOptions* options) {
|
||||
MP_RETURN_IF_ERROR(SanityCheckClassifierOptions(classifier_options));
|
||||
ASSIGN_OR_RETURN(const auto heads_properties,
|
||||
GetClassificationHeadsProperties(model_resources));
|
||||
MP_ASSIGN_OR_RETURN(const auto heads_properties,
|
||||
GetClassificationHeadsProperties(model_resources));
|
||||
for (int i = 0; i < heads_properties.num_heads; ++i) {
|
||||
MP_RETURN_IF_ERROR(ConfigureScoreCalibrationIfAny(
|
||||
*model_resources.GetMetadataExtractor(), i, options));
|
||||
|
@ -406,7 +407,7 @@ class ClassificationPostprocessingGraph : public mediapipe::Subgraph {
|
|||
absl::StatusOr<mediapipe::CalculatorGraphConfig> GetConfig(
|
||||
mediapipe::SubgraphContext* sc) override {
|
||||
Graph graph;
|
||||
ASSIGN_OR_RETURN(
|
||||
MP_ASSIGN_OR_RETURN(
|
||||
auto output_streams,
|
||||
BuildClassificationPostprocessing(
|
||||
sc->Options<proto::ClassificationPostprocessingGraphOptions>(),
|
||||
|
|
|
@ -422,8 +422,8 @@ class PostprocessingTest : public tflite::testing::Test {
|
|||
absl::StatusOr<OutputStreamPoller> BuildGraph(
|
||||
absl::string_view model_name, const proto::ClassifierOptions& options,
|
||||
bool connect_timestamps = false) {
|
||||
ASSIGN_OR_RETURN(auto model_resources,
|
||||
CreateModelResourcesForModel(model_name));
|
||||
MP_ASSIGN_OR_RETURN(auto model_resources,
|
||||
CreateModelResourcesForModel(model_name));
|
||||
|
||||
Graph graph;
|
||||
auto& postprocessing = graph.AddNode(
|
||||
|
@ -450,13 +450,13 @@ class PostprocessingTest : public tflite::testing::Test {
|
|||
|
||||
MP_RETURN_IF_ERROR(calculator_graph_.Initialize(graph.GetConfig()));
|
||||
if (connect_timestamps) {
|
||||
ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kTimestampedClassificationsName));
|
||||
MP_ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kTimestampedClassificationsName));
|
||||
MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{}));
|
||||
return poller;
|
||||
}
|
||||
ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kClassificationsName));
|
||||
MP_ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
|
||||
kClassificationsName));
|
||||
MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{}));
|
||||
return poller;
|
||||
}
|
||||
|
|
|
@@ -221,15 +221,16 @@ absl::StatusOr<LabelItems> GetLabelItemsIfAny(
LabelItems empty_label_items;
return empty_label_items;
}
ASSIGN_OR_RETURN(absl::string_view labels_file,
metadata_extractor.GetAssociatedFile(labels_filename));
MP_ASSIGN_OR_RETURN(absl::string_view labels_file,
metadata_extractor.GetAssociatedFile(labels_filename));
const std::string display_names_filename =
ModelMetadataExtractor::FindFirstAssociatedFileName(
tensor_metadata, associated_file_type, locale);
absl::string_view display_names_file;
if (!display_names_filename.empty()) {
ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
display_names_filename));
MP_ASSIGN_OR_RETURN(
display_names_file,
metadata_extractor.GetAssociatedFile(display_names_filename));
}
return mediapipe::BuildLabelMapFromFiles(labels_file, display_names_file);
}

@@ -237,7 +238,7 @@ absl::StatusOr<LabelItems> GetLabelItemsIfAny(
absl::StatusOr<float> GetScoreThreshold(
const ModelMetadataExtractor& metadata_extractor,
const TensorMetadata& tensor_metadata) {
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
const ProcessUnit* score_thresholding_process_unit,
metadata_extractor.FindFirstProcessUnit(
tensor_metadata, ProcessUnitOptions_ScoreThresholdingOptions));

@@ -288,7 +289,7 @@ GetScoreCalibrationOptionsIfAny(
const ModelMetadataExtractor& metadata_extractor,
const TensorMetadata& tensor_metadata) {
// Get ScoreCalibrationOptions, if any.
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
const ProcessUnit* score_calibration_process_unit,
metadata_extractor.FindFirstProcessUnit(
tensor_metadata, tflite::ProcessUnitOptions_ScoreCalibrationOptions));

@@ -309,7 +310,7 @@ GetScoreCalibrationOptionsIfAny(
"parameters file with type TENSOR_AXIS_SCORE_CALIBRATION.",
MediaPipeTasksStatus::kMetadataAssociatedFileNotFoundError);
}
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
absl::string_view score_calibration_file,
metadata_extractor.GetAssociatedFile(score_calibration_filename));
ScoreCalibrationCalculatorOptions score_calibration_calculator_options;
@@ -393,13 +394,13 @@ absl::StatusOr<PostProcessingSpecs> BuildPostProcessingSpecs(
metadata_extractor->GetOutputTensorMetadata();
PostProcessingSpecs specs;
specs.max_results = options.max_results();
ASSIGN_OR_RETURN(specs.output_tensor_indices,
GetOutputTensorIndices(output_tensors_metadata));
MP_ASSIGN_OR_RETURN(specs.output_tensor_indices,
GetOutputTensorIndices(output_tensors_metadata));
// Extracts mandatory BoundingBoxProperties and performs sanity checks on the
// fly.
ASSIGN_OR_RETURN(const BoundingBoxProperties* bounding_box_properties,
GetBoundingBoxProperties(*output_tensors_metadata->Get(
specs.output_tensor_indices[0])));
MP_ASSIGN_OR_RETURN(const BoundingBoxProperties* bounding_box_properties,
GetBoundingBoxProperties(*output_tensors_metadata->Get(
specs.output_tensor_indices[0])));
if (bounding_box_properties->index() == nullptr) {
specs.bounding_box_corners_order = {0, 1, 2, 3};
} else {

@@ -415,7 +416,7 @@ absl::StatusOr<PostProcessingSpecs> BuildPostProcessingSpecs(
// For models with in-model-nms, the label map is stored in the Category
// tensor which use TENSOR_VALUE_LABELS. For models with out-of-model-nms, the
// label map is stored in the Score tensor which use TENSOR_AXIS_LABELS.
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
specs.label_items,
GetLabelItemsIfAny(
*metadata_extractor,

@@ -425,7 +426,7 @@ absl::StatusOr<PostProcessingSpecs> BuildPostProcessingSpecs(
options.display_names_locale()));
// Obtains allow/deny categories.
specs.is_allowlist = !options.category_allowlist().empty();
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
specs.allow_or_deny_categories,
GetAllowOrDenyCategoryIndicesIfAny(options, specs.label_items));

@@ -433,7 +434,7 @@ absl::StatusOr<PostProcessingSpecs> BuildPostProcessingSpecs(
if (options.has_score_threshold()) {
specs.score_threshold = options.score_threshold();
} else {
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
specs.score_threshold,
GetScoreThreshold(
*metadata_extractor,

@@ -444,7 +445,7 @@ absl::StatusOr<PostProcessingSpecs> BuildPostProcessingSpecs(
}
if (in_model_nms) {
// Builds score calibration options (if available) from metadata.
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
specs.score_calibration_options,
GetScoreCalibrationOptionsIfAny(
*metadata_extractor,
@@ -741,9 +742,9 @@ absl::Status ConfigureDetectionPostprocessingGraph(
const ModelMetadataExtractor* metadata_extractor =
model_resources.GetMetadataExtractor();
if (in_model_nms) {
ASSIGN_OR_RETURN(auto post_processing_specs,
BuildInModelNmsPostProcessingSpecs(detector_options,
metadata_extractor));
MP_ASSIGN_OR_RETURN(auto post_processing_specs,
BuildInModelNmsPostProcessingSpecs(detector_options,
metadata_extractor));
ConfigureInModelNmsTensorsToDetectionsCalculator(
post_processing_specs, options.mutable_tensors_to_detections_options());
ConfigureDetectionLabelIdToTextCalculator(

@@ -754,9 +755,9 @@ absl::Status ConfigureDetectionPostprocessingGraph(
std::move(*post_processing_specs.score_calibration_options);
}
} else {
ASSIGN_OR_RETURN(auto post_processing_specs,
BuildOutModelNmsPostProcessingSpecs(detector_options,
metadata_extractor));
MP_ASSIGN_OR_RETURN(auto post_processing_specs,
BuildOutModelNmsPostProcessingSpecs(
detector_options, metadata_extractor));
MP_RETURN_IF_ERROR(ConfigureOutModelNmsTensorsToDetectionsCalculator(
metadata_extractor, post_processing_specs,
options.mutable_tensors_to_detections_options()));

@@ -795,7 +796,7 @@ class DetectionPostprocessingGraph : public mediapipe::Subgraph {
absl::StatusOr<mediapipe::CalculatorGraphConfig> GetConfig(
mediapipe::SubgraphContext* sc) override {
Graph graph;
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
auto output_streams,
BuildDetectionPostprocessing(
*sc->MutableOptions<proto::DetectionPostprocessingGraphOptions>(),

@@ -823,8 +824,8 @@ class DetectionPostprocessingGraph : public mediapipe::Subgraph {
if (!graph_options.has_non_max_suppression_options()) {
// Calculators to perform score calibration, if specified in the options.
if (graph_options.has_score_calibration_options()) {
ASSIGN_OR_RETURN(tensors_in,
CalibrateScores(tensors_in, graph_options, graph));
MP_ASSIGN_OR_RETURN(tensors_in,
CalibrateScores(tensors_in, graph_options, graph));
}
// Calculator to convert output tensors to a detection proto vector.
auto& tensors_to_detections =
@@ -318,8 +318,8 @@ class PostprocessingTest : public tflite::testing::Test {
protected:
absl::StatusOr<OutputStreamPoller> BuildGraph(
absl::string_view model_name, const proto::DetectorOptions& options) {
ASSIGN_OR_RETURN(auto model_resources,
CreateModelResourcesForModel(model_name));
MP_ASSIGN_OR_RETURN(auto model_resources,
CreateModelResourcesForModel(model_name));

Graph graph;
auto& postprocessing = graph.AddNode(

@@ -335,8 +335,8 @@ class PostprocessingTest : public tflite::testing::Test {
postprocessing.Out(kDetectionsTag).SetName(std::string(kDetectionsName)) >>
graph[Output<std::vector<Detection>>(kDetectionsTag)];
MP_RETURN_IF_ERROR(calculator_graph_.Initialize(graph.GetConfig()));
ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
std::string(kDetectionsName)));
MP_ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
std::string(kDetectionsName)));
MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{}));
return poller;
}
@@ -151,13 +151,13 @@ absl::Status ConfigureEmbeddingPostprocessingGraph(
const ModelResources& model_resources,
const proto::EmbedderOptions& embedder_options,
proto::EmbeddingPostprocessingGraphOptions* options) {
ASSIGN_OR_RETURN(bool has_quantized_outputs,
HasQuantizedOutputs(model_resources));
MP_ASSIGN_OR_RETURN(bool has_quantized_outputs,
HasQuantizedOutputs(model_resources));
options->set_has_quantized_outputs(has_quantized_outputs);
auto* tensors_to_embeddings_options =
options->mutable_tensors_to_embeddings_options();
*tensors_to_embeddings_options->mutable_embedder_options() = embedder_options;
ASSIGN_OR_RETURN(auto head_names, GetHeadNames(model_resources));
MP_ASSIGN_OR_RETURN(auto head_names, GetHeadNames(model_resources));
if (!head_names.empty()) {
*tensors_to_embeddings_options->mutable_head_names() = {head_names.begin(),
head_names.end()};

@@ -197,7 +197,7 @@ class EmbeddingPostprocessingGraph : public mediapipe::Subgraph {
absl::StatusOr<mediapipe::CalculatorGraphConfig> GetConfig(
mediapipe::SubgraphContext* sc) override {
Graph graph;
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
auto output_streams,
BuildEmbeddingPostprocessing(
sc->Options<proto::EmbeddingPostprocessingGraphOptions>(),

@@ -159,8 +159,8 @@ class PostprocessingTest : public tflite::testing::Test {
absl::string_view model_name, const proto::EmbedderOptions& options,
bool connect_timestamps = false,
const std::vector<absl::string_view>& ignored_head_names = {}) {
ASSIGN_OR_RETURN(auto model_resources,
CreateModelResourcesForModel(model_name));
MP_ASSIGN_OR_RETURN(auto model_resources,
CreateModelResourcesForModel(model_name));

Graph graph;
auto& postprocessing = graph.AddNode(

@@ -192,13 +192,13 @@ class PostprocessingTest : public tflite::testing::Test {

MP_RETURN_IF_ERROR(calculator_graph_.Initialize(graph.GetConfig()));
if (connect_timestamps) {
ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
kTimestampedEmbeddingsName));
MP_ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller(
kTimestampedEmbeddingsName));
MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{}));
return poller;
}
ASSIGN_OR_RETURN(auto poller,
calculator_graph_.AddOutputStreamPoller(kEmbeddingsName));
MP_ASSIGN_OR_RETURN(
auto poller, calculator_graph_.AddOutputStreamPoller(kEmbeddingsName));
MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{}));
return poller;
}
@@ -125,8 +125,8 @@ bool DetermineImagePreprocessingGpuBackend(
absl::Status ConfigureImagePreprocessingGraph(
const ModelResources& model_resources, bool use_gpu,
proto::ImagePreprocessingGraphOptions* options) {
ASSIGN_OR_RETURN(auto image_tensor_specs,
vision::BuildInputImageTensorSpecs(model_resources));
MP_ASSIGN_OR_RETURN(auto image_tensor_specs,
vision::BuildInputImageTensorSpecs(model_resources));
MP_RETURN_IF_ERROR(ConfigureImageToTensorCalculator(
image_tensor_specs, options->mutable_image_to_tensor_options()));
// The GPU backend isn't able to process int data. If the input tensor is

@@ -180,8 +180,8 @@ absl::Status ConfigureTextPreprocessingGraph(
MediaPipeTasksStatus::kInvalidArgumentError);
}

ASSIGN_OR_RETURN(TextModelType::ModelType model_type,
GetModelType(model_resources));
MP_ASSIGN_OR_RETURN(TextModelType::ModelType model_type,
GetModelType(model_resources));
const tflite::SubGraph& model_graph =
*(*model_resources.GetTfLiteModel()->subgraphs())[0];
options.set_model_type(model_type);

@@ -193,13 +193,13 @@ absl::Status ConfigureTextPreprocessingGraph(
}
case TextModelType::BERT_MODEL:
case TextModelType::REGEX_MODEL: {
ASSIGN_OR_RETURN(int max_seq_len, GetMaxSeqLen(model_graph));
MP_ASSIGN_OR_RETURN(int max_seq_len, GetMaxSeqLen(model_graph));
options.set_max_seq_len(max_seq_len);
}
}
if (model_type == TextModelType::BERT_MODEL) {
ASSIGN_OR_RETURN(bool has_dynamic_input_tensors,
HasDynamicInputTensors(model_graph));
MP_ASSIGN_OR_RETURN(bool has_dynamic_input_tensors,
HasDynamicInputTensors(model_graph));
options.set_has_dynamic_input_tensors(has_dynamic_input_tensors);
}
return absl::OkStatus();
@@ -227,7 +227,7 @@ class TextPreprocessingGraph : public mediapipe::Subgraph {
absl::StatusOr<mediapipe::CalculatorGraphConfig> GetConfig(
mediapipe::SubgraphContext* sc) override {
Graph graph;
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
Source<std::vector<Tensor>> tensors_in,
BuildTextPreprocessing(
sc->Options<TextPreprocessingGraphOptions>(),

@@ -242,8 +242,8 @@ class TextPreprocessingGraph : public mediapipe::Subgraph {
absl::StatusOr<Source<std::vector<Tensor>>> BuildTextPreprocessing(
const TextPreprocessingGraphOptions& options, Source<std::string> text_in,
SideSource<ModelMetadataExtractor> metadata_extractor_in, Graph& graph) {
ASSIGN_OR_RETURN(std::string preprocessor_name,
GetCalculatorNameFromModelType(options.model_type()));
MP_ASSIGN_OR_RETURN(std::string preprocessor_name,
GetCalculatorNameFromModelType(options.model_type()));
auto& text_preprocessor = graph.AddNode(preprocessor_name);
switch (options.model_type()) {
case TextModelType::UNSPECIFIED_MODEL:
@@ -122,8 +122,8 @@ absl::Status ExternalFileHandler::MapExternalFile() {
// Obtain file descriptor, offset and size.
int fd = -1;
if (!external_file_.file_name().empty()) {
ASSIGN_OR_RETURN(std::string file_name,
PathToResourceAsFile(external_file_.file_name()));
MP_ASSIGN_OR_RETURN(std::string file_name,
PathToResourceAsFile(external_file_.file_name()));
owned_fd_ = open(file_name.c_str(), O_RDONLY | O_BINARY);
if (owned_fd_ < 0) {
const std::string error_message = absl::StrFormat(

@@ -59,14 +59,14 @@ absl::Status ModelAssetBundleResources::ExtractFilesFromExternalFileProto() {
if (model_asset_bundle_file_->has_file_name()) {
// If the model asset bundle file name is a relative path, searches the file
// in a platform-specific location and returns the absolute path on success.
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
std::string path_to_resource,
mediapipe::PathToResourceAsFile(model_asset_bundle_file_->file_name()));
model_asset_bundle_file_->set_file_name(path_to_resource);
}
ASSIGN_OR_RETURN(model_asset_bundle_file_handler_,
ExternalFileHandler::CreateFromExternalFile(
model_asset_bundle_file_.get()));
MP_ASSIGN_OR_RETURN(model_asset_bundle_file_handler_,
ExternalFileHandler::CreateFromExternalFile(
model_asset_bundle_file_.get()));
const char* buffer_data =
model_asset_bundle_file_handler_->GetFileContent().data();
size_t buffer_size =
@@ -110,12 +110,12 @@ absl::Status ModelResources::BuildModelFromExternalFileProto() {
} else {
// If the model file name is a relative path, searches the file in a
// platform-specific location and returns the absolute path on success.
ASSIGN_OR_RETURN(std::string path_to_resource,
PathToResourceAsFile(model_file_->file_name()));
MP_ASSIGN_OR_RETURN(std::string path_to_resource,
PathToResourceAsFile(model_file_->file_name()));
model_file_->set_file_name(path_to_resource);
}
}
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
model_file_handler_,
ExternalFileHandler::CreateFromExternalFile(model_file_.get()));
const char* buffer_data = model_file_handler_->GetFileContent().data();

@@ -152,9 +152,9 @@ absl::Status ModelResources::BuildModelFromExternalFileProto() {

model_packet_ = MakePacket<ModelPtr>(
model.release(), [](tflite::FlatBufferModel* model) { delete model; });
ASSIGN_OR_RETURN(auto model_metadata_extractor,
metadata::ModelMetadataExtractor::CreateFromModelBuffer(
buffer_data, buffer_size));
MP_ASSIGN_OR_RETURN(auto model_metadata_extractor,
metadata::ModelMetadataExtractor::CreateFromModelBuffer(
buffer_data, buffer_size));
metadata_extractor_packet_ = PacketAdopting<metadata::ModelMetadataExtractor>(
std::move(model_metadata_extractor));
return absl::OkStatus();
@@ -109,7 +109,7 @@ class ModelResourcesCalculator : public api2::Node {
"ModelResourcesCacheService, and the CalculatorOptions has no "
"'model_file' field to create a local ModelResources.");
}
ASSIGN_OR_RETURN(
MP_ASSIGN_OR_RETURN(
model_resources_,
ModelResources::Create(
"", std::make_unique<proto::ExternalFile>(options.model_file())));
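Editorial note (not part of this diff): every hunk shown here makes the same mechanical change, renaming ASSIGN_OR_RETURN to MP_ASSIGN_OR_RETURN while leaving the arguments untouched. A minimal before/after sketch; BuildSpecs and the variable names are hypothetical.

// Before this commit (removed lines above):
//   ASSIGN_OR_RETURN(auto specs, BuildSpecs(options, metadata_extractor));
// After this commit (added lines above):
MP_ASSIGN_OR_RETURN(auto specs, BuildSpecs(options, metadata_extractor));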
Some files were not shown because too many files have changed in this diff.