Project import generated by Copybara.

PiperOrigin-RevId: 254856010
MediaPipe Team authored 2019-06-24 16:03:03 -07:00; committed by jqtang
parent fcb23fd99d
commit 2aaf4693db
21 changed files with 275 additions and 43 deletions

View File

@@ -12,6 +12,8 @@ build --copt='-Wno-comment'
build --copt='-Wno-return-type'
build --copt='-Wno-unused-local-typedefs'
build --copt='-Wno-ignored-attributes'
# Temporarily set the incompatibility flag for Bazel 0.27.0 and above
build --incompatible_disable_deprecated_attr_params=false
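# For a one-off build, the same flag can also be passed on the command line
# (illustrative example, not part of this commit):
#   bazel build --incompatible_disable_deprecated_attr_params=false //mediapipe/...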
# Sets the default Apple platform to macOS.
build --apple_platform_type=macos

View File

@@ -1,9 +1,7 @@
![MediaPipe](mediapipe/docs/images/mediapipe_small.png?raw=true "MediaPipe logo")
=======================================================================
#### We will be [presenting at CVPR 2019](https://sites.google.com/corp/view/perception-cv4arvr/mediapipe) on June 17~20 in Long Beach, CA. Come join us!
[MediaPipe](http://g.co/mediapipe) is a framework for building multimodal (e.g., video, audio, any time series data) applied ML pipelines. With MediaPipe, a perception pipeline can be built as a graph of modular components, including, for instance, inference models (e.g., TensorFlow, TFLite) and media processing functions.
[MediaPipe](http://mediapipe.dev) is a framework for building multimodal (e.g., video, audio, any time series data) applied ML pipelines. With MediaPipe, a perception pipeline can be built as a graph of modular components, including, for instance, inference models (e.g., TensorFlow, TFLite) and media processing functions.
![Real-time Face Detection](mediapipe/docs/images/realtime_face_detection.gif)
@@ -14,13 +12,24 @@ Follow these [instructions](mediapipe/docs/install.md).
See mobile and desktop [examples](mediapipe/docs/examples.md).
## Documentation
[MediaPipe Read-the-Docs](https://mediapipe.readthedocs.io/).
[MediaPipe Read-the-Docs](https://mediapipe.readthedocs.io/) or [docs.mediapipe.dev](https://docs.mediapipe.dev)
Check out the [Examples page] for tutorials on how to use MediaPipe, and the [Concepts page](https://mediapipe.readthedocs.io/en/latest/concepts.html) for basic definitions.
## Visualizing MediaPipe graphs
A web-based visualizer is hosted on [MediaPipe Visualizer](https://mediapipe-viz.appspot.com/). Please also see instructions [here](mediapipe/docs/visualizer.md).
A web-based visualizer is hosted on [viz.mediapipe.dev](https://viz.mediapipe.dev/). Please also see instructions [here](mediapipe/docs/visualizer.md).
## Community forum
* [discuss](https://groups.google.com/forum/#!forum/mediapipe) - General community discussion around MediaPipe
## Publications
* [MediaPipe: A Framework for Building Perception Pipelines](https://tiny.cc/mediapipe_paper) (draft)
* [MediaPipe: A Framework for Building Perception Pipelines](https://arxiv.org/abs/1906.08172)
## Events
[Open sourced at CVPR 2019](https://sites.google.com/corp/view/perception-cv4arvr/mediapipe) on June 17~20 in Long Beach, CA
## Alpha Disclaimer
MediaPipe is currently in alpha for v0.5. We are still making breaking API changes and expect to reach a stable API by v1.0.
## Contributing
We welcome contributions. Please follow these [guidelines](./CONTRIBUTING.md).

View File

@@ -246,6 +246,23 @@ cc_library(
    alwayslink = 1,
)
cc_library(
    name = "image_cropping_calculator",
    srcs = ["image_cropping_calculator.cc"],
    visibility = ["//visibility:public"],
    deps = [
        "//mediapipe/framework:calculator_framework",
        "//mediapipe/framework/formats:image_frame",
        "//mediapipe/framework/formats:image_frame_opencv",
        "//mediapipe/framework/formats:rect_cc_proto",
        "//mediapipe/framework/port:opencv_core",
        "//mediapipe/framework/port:opencv_imgproc",
        "//mediapipe/framework/port:ret_check",
        "//mediapipe/framework/port:status",
    ],
    alwayslink = 1,
)
cc_library(
    name = "luminance_calculator",
    srcs = ["luminance_calculator.cc"],

View File

@@ -0,0 +1,150 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/formats/rect.pb.h"
#include "mediapipe/framework/port/opencv_core_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/ret_check.h"
#include "mediapipe/framework/port/status.h"
namespace mediapipe {
// Crops the input texture to the given rectangle region. The rectangle can
// be at an arbitrary location on the image, with rotation. If there is
// rotation, the output texture will have the size of the input rectangle.
// The rotation should be in radians; see rect.proto for details.
// Currently this calculator only supports CPU.
//
// Input:
//   IMAGE: ImageFrame representing the input image.
//   One of the following two tags:
//   RECT - A Rect proto specifying the width/height and location of the
//          cropping rectangle.
//   NORM_RECT - A NormalizedRect proto specifying the width/height and
//               location of the cropping rectangle in normalized coordinates.
//
// Output:
//   IMAGE - Cropped frames.
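//
// A hypothetical usage sketch (the stream names are illustrative assumptions,
// not taken from this commit):
//
//   node {
//     calculator: "ImageCroppingCalculator"
//     input_stream: "IMAGE:input_video"
//     input_stream: "NORM_RECT:region_of_interest"
//     output_stream: "IMAGE:cropped_video"
//   }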
class ImageCroppingCalculator : public CalculatorBase {
 public:
  ImageCroppingCalculator() = default;
  ~ImageCroppingCalculator() override = default;

  static ::mediapipe::Status GetContract(CalculatorContract* cc);
  ::mediapipe::Status Process(CalculatorContext* cc) override;

 private:
  ::mediapipe::Status RenderCpu(CalculatorContext* cc);
  ::mediapipe::Status RenderGpu(CalculatorContext* cc);

  // TODO: Merge with GlCroppingCalculator to have GPU support.
  bool use_gpu_{};
};
REGISTER_CALCULATOR(ImageCroppingCalculator);
::mediapipe::Status ImageCroppingCalculator::GetContract(
    CalculatorContract* cc) {
  RET_CHECK(cc->Inputs().HasTag("IMAGE"));
  RET_CHECK(cc->Outputs().HasTag("IMAGE"));

  cc->Inputs().Tag("IMAGE").Set<ImageFrame>();
  if (cc->Inputs().HasTag("RECT")) {
    cc->Inputs().Tag("RECT").Set<Rect>();
  }
  if (cc->Inputs().HasTag("NORM_RECT")) {
    cc->Inputs().Tag("NORM_RECT").Set<NormalizedRect>();
  }
  cc->Outputs().Tag("IMAGE").Set<ImageFrame>();

  return ::mediapipe::OkStatus();
}
::mediapipe::Status ImageCroppingCalculator::Process(CalculatorContext* cc) {
  if (use_gpu_) {
    RETURN_IF_ERROR(RenderGpu(cc));
  } else {
    RETURN_IF_ERROR(RenderCpu(cc));
  }
  return ::mediapipe::OkStatus();
}
::mediapipe::Status ImageCroppingCalculator::RenderCpu(CalculatorContext* cc) {
  const auto& input_img = cc->Inputs().Tag("IMAGE").Get<ImageFrame>();
  cv::Mat input_mat = formats::MatView(&input_img);

  float rect_center_x = input_img.Width() / 2.0f;
  float rect_center_y = input_img.Height() / 2.0f;
  float rotation = 0.0f;
  int target_width = input_img.Width();
  int target_height = input_img.Height();
  if (cc->Inputs().HasTag("RECT")) {
    const auto& rect = cc->Inputs().Tag("RECT").Get<Rect>();
    if (rect.width() > 0 && rect.height() > 0 && rect.x_center() >= 0 &&
        rect.y_center() >= 0) {
      rotation = rect.rotation();
      rect_center_x = rect.x_center();
      rect_center_y = rect.y_center();
      target_width = rect.width();
      target_height = rect.height();
    }
  } else if (cc->Inputs().HasTag("NORM_RECT")) {
    const auto& rect = cc->Inputs().Tag("NORM_RECT").Get<NormalizedRect>();
    if (rect.width() > 0.0 && rect.height() > 0.0 && rect.x_center() >= 0.0 &&
        rect.y_center() >= 0.0) {
      rotation = rect.rotation();
      rect_center_x = std::round(rect.x_center() * input_img.Width());
      rect_center_y = std::round(rect.y_center() * input_img.Height());
      target_width = std::round(rect.width() * input_img.Width());
      target_height = std::round(rect.height() * input_img.Height());
    }
  }

  cv::Mat rotated_mat;
  if (std::abs(rotation) > 1e-5) {
    // TODO: Use open source common math library.
    const float pi = 3.1415926f;
    rotation = rotation * 180.0 / pi;

    // First rotate the image.
    cv::Point2f src_center(rect_center_x, rect_center_y);
    cv::Mat rotation_mat = cv::getRotationMatrix2D(src_center, rotation, 1.0);
    cv::warpAffine(input_mat, rotated_mat, rotation_mat, input_mat.size());
  } else {
    input_mat.copyTo(rotated_mat);
  }

  // Then crop the requested area.
  const cv::Rect cropping_rect(rect_center_x - target_width / 2,
                               rect_center_y - target_height / 2, target_width,
                               target_height);
  cv::Mat cropped_image = cv::Mat(rotated_mat, cropping_rect);

  std::unique_ptr<ImageFrame> output_frame(new ImageFrame(
      input_img.Format(), cropped_image.cols, cropped_image.rows));
  cv::Mat output_mat = formats::MatView(output_frame.get());
  cropped_image.copyTo(output_mat);
  cc->Outputs().Tag("IMAGE").Add(output_frame.release(), cc->InputTimestamp());

  return ::mediapipe::OkStatus();
}
::mediapipe::Status ImageCroppingCalculator::RenderGpu(CalculatorContext* cc) {
  return ::mediapipe::UnimplementedError("GPU support is not implemented yet.");
}
} // namespace mediapipe

View File

@@ -300,6 +300,37 @@ REGISTER_CALCULATOR(ImageTransformationCalculator);
  int input_width = cc->Inputs().Tag("IMAGE").Get<ImageFrame>().Width();
  int input_height = cc->Inputs().Tag("IMAGE").Get<ImageFrame>().Height();

  const auto& input_img = cc->Inputs().Tag("IMAGE").Get<ImageFrame>();
  cv::Mat input_mat = formats::MatView(&input_img);
  cv::Mat scaled_mat;
  if (scale_mode_ == mediapipe::ScaleMode_Mode_STRETCH) {
    cv::resize(input_mat, scaled_mat, cv::Size(output_width_, output_height_));
  } else {
    const float scale =
        std::min(static_cast<float>(output_width_) / input_width,
                 static_cast<float>(output_height_) / input_height);
    const int target_width = std::round(input_width * scale);
    const int target_height = std::round(input_height * scale);
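    // Worked example (illustrative numbers, not from this commit): a 640x480
    // input with a 256x256 requested output gives scale = min(256/640,
    // 256/480) = 0.4, so the image is resized to 256x192; FIT then pads 32
    // rows above and below to fill the 256x256 canvas.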
    if (scale_mode_ == mediapipe::ScaleMode_Mode_FIT) {
      cv::Mat intermediate_mat;
      cv::resize(input_mat, intermediate_mat,
                 cv::Size(target_width, target_height));
      const int top = (output_height_ - target_height) / 2;
      const int bottom = output_height_ - target_height - top;
      const int left = (output_width_ - target_width) / 2;
      const int right = output_width_ - target_width - left;
      cv::copyMakeBorder(intermediate_mat, scaled_mat, top, bottom, left, right,
                         options_.constant_padding() ? cv::BORDER_CONSTANT
                                                     : cv::BORDER_REPLICATE);
    } else {
      cv::resize(input_mat, scaled_mat, cv::Size(target_width, target_height));
      output_width_ = target_width;
      output_height_ = target_height;
    }
  }

  int output_width;
  int output_height;
  ComputeOutputDimensions(input_width, input_height, &output_width,
@@ -318,26 +349,15 @@ REGISTER_CALCULATOR(ImageTransformationCalculator);
        cc->InputSidePackets().Tag("ROTATION_DEGREES").Get<int>());
  }

  const auto& input_img = cc->Inputs().Tag("IMAGE").Get<ImageFrame>();
  std::unique_ptr<ImageFrame> output_frame(
      new ImageFrame(input_img.Format(), output_width, output_height));
  cv::Mat input_mat = formats::MatView(&input_img);
  cv::Mat output_mat = formats::MatView(output_frame.get());
  cv::Mat scaled_mat;
  if (scale_mode_ != mediapipe::ScaleMode_Mode_STRETCH) {
    // TODO finish CPU version features.
    return ::mediapipe::UnimplementedError(
        "Only STRETCH scale mode currently supported.");
  }
  cv::resize(input_mat, scaled_mat, cv::Size(output_width_, output_height_));

  cv::Mat rotated_mat;
  const int angle = RotationModeToDegrees(rotation_);
  cv::Point2f src_center(scaled_mat.cols / 2.0, scaled_mat.rows / 2.0);
  cv::Mat rotation_mat = cv::getRotationMatrix2D(src_center, angle, 1.0);
  cv::warpAffine(scaled_mat, rotated_mat, rotation_mat, scaled_mat.size());

  std::unique_ptr<ImageFrame> output_frame(
      new ImageFrame(input_img.Format(), output_width, output_height));
  cv::Mat output_mat = formats::MatView(output_frame.get());
  rotated_mat.copyTo(output_mat);
  cc->Outputs().Tag("IMAGE").Add(output_frame.release(), cc->InputTimestamp());

View File

@@ -46,4 +46,8 @@ message ImageTransformationCalculatorOptions {
  optional bool flip_horizontally = 5 [default = false];

  // Scale mode.
  optional ScaleMode.Mode scale_mode = 6;

  // Padding type. This option is only used when the scale mode is FIT.
  // Defaults to BORDER_CONSTANT; if set to false, BORDER_REPLICATE is used
  // instead.
  optional bool constant_padding = 7 [default = true];
}
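For illustration, a hedged sketch of how this option might be set in a graph config; the stream names and output dimensions are assumptions, not part of this commit:

```bash
node {
  calculator: "ImageTransformationCalculator"
  input_stream: "IMAGE:input_video"
  output_stream: "IMAGE:scaled_video"
  options: {
    [mediapipe.ImageTransformationCalculatorOptions.ext] {
      output_width: 256
      output_height: 256
      scale_mode: FIT
      constant_padding: false  # Letterbox with replicated border pixels.
    }
  }
}
```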

View File

@@ -84,6 +84,11 @@ class DetectionLetterboxRemovalCalculator : public CalculatorBase {
  }

  ::mediapipe::Status Process(CalculatorContext* cc) override {
    // Only process if there are input detections.
    if (cc->Inputs().Tag(kDetectionsTag).IsEmpty()) {
      return ::mediapipe::OkStatus();
    }

    const auto& input_detections =
        cc->Inputs().Tag(kDetectionsTag).Get<std::vector<Detection>>();
    const auto& letterbox_padding =

View File

@@ -323,6 +323,9 @@ class NonMaxSuppressionCalculator : public CalculatorBase {
    }
    auto weighted_detection = detection;
    if (!candidates.empty()) {
      const int num_keypoints =
          detection.location_data().relative_keypoints_size();
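      // Score-weighted sums of each keypoint's (x, y); zero-initialized here
      // and normalized by total_score after the accumulation loop below.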
      std::vector<float> keypoints(num_keypoints * 2);
      float w_xmin = 0.0f;
      float w_ymin = 0.0f;
      float w_xmax = 0.0f;
@@ -330,13 +333,20 @@ class NonMaxSuppressionCalculator : public CalculatorBase {
      float total_score = 0.0f;
      for (const auto& candidate : candidates) {
        total_score += candidate.second;
        const auto& bbox = detections[candidate.first]
                               .location_data()
                               .relative_bounding_box();
        const auto& location_data =
            detections[candidate.first].location_data();
        const auto& bbox = location_data.relative_bounding_box();
        w_xmin += bbox.xmin() * candidate.second;
        w_ymin += bbox.ymin() * candidate.second;
        w_xmax += (bbox.xmin() + bbox.width()) * candidate.second;
        w_ymax += (bbox.ymin() + bbox.height()) * candidate.second;
        for (int i = 0; i < num_keypoints; ++i) {
          keypoints[i * 2] +=
              location_data.relative_keypoints(i).x() * candidate.second;
          keypoints[i * 2 + 1] +=
              location_data.relative_keypoints(i).y() * candidate.second;
        }
      }
      auto* weighted_location = weighted_detection.mutable_location_data()
                                    ->mutable_relative_bounding_box();
@@ -346,6 +356,12 @@ class NonMaxSuppressionCalculator : public CalculatorBase {
                                    weighted_location->xmin());
      weighted_location->set_height((w_ymax / total_score) -
                                    weighted_location->ymin());
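      // Normalize the accumulated keypoint sums by the total score to finish
      // the weighted average, mirroring the bounding-box computation above.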
      for (int i = 0; i < num_keypoints; ++i) {
        auto* keypoint = weighted_detection.mutable_location_data()
                             ->mutable_relative_keypoints(i);
        keypoint->set_x(keypoints[i * 2] / total_score);
        keypoint->set_y(keypoints[i * 2 + 1] / total_score);
      }
    }
    remained_indexed_scores = std::move(remained);
    output_detections->push_back(weighted_detection);

View File

@@ -29,7 +29,7 @@ adb install bazel-bin/mediapipe/examples/android/src/java/com/google/mediapipe/a
![face_detection_android_gpu_graph](images/mobile/face_detection_android_gpu.png){width="400"}
To visualize the graph as shown above, copy the text specification of the graph
below and paste it into [MediaPipe Visualizer](https://mediapipe-viz.appspot.com/).
below and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/).
```bash
# MediaPipe graph that performs object detection with TensorFlow Lite on GPU.

View File

@@ -29,7 +29,7 @@ adb install bazel-bin/mediapipe/examples/android/src/java/com/google/mediapipe/a
![hair_segmentation_android_gpu_graph](images/mobile/hair_segmentation_android_gpu.png){width="600"}
To visualize the graph as shown above, copy the text specification of the graph
below and paste it into [MediaPipe Visualizer](https://mediapipe-viz.appspot.com/).
below and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/).
```bash
# MediaPipe graph that performs hair segmentation with TensorFlow Lite on GPU.

View File

@@ -49,7 +49,7 @@
```
You can visualize this graph using
[MediaPipe Visualizer](https://mediapipe-viz.appspot.com) by pasting the
[MediaPipe Visualizer](https://viz.mediapipe.dev) by pasting the
CalculatorGraphConfig content below into the visualizer. See
[here](./visualizer.md) for help on the visualizer.

View File

@@ -31,6 +31,10 @@ APIs for MediaPipe
* Graph Execution API in Java (Android)
* (Coming Soon) Graph Execution API in Objective-C (iOS)
Alpha Disclaimer
==================
MediaPipe is currently in alpha for v0.5. We are still making breaking API changes and expect to reach a stable API by v1.0. We recommend that you target a specific version of MediaPipe, and periodically bump to the latest release. That way you have control over when a breaking change affects you.
User Documentation
==================

View File

@@ -23,7 +23,7 @@ Required libraries
* Android SDK release 28.0.3 and above
* Android NDK r18b and above
* Android NDK r17c and above
### Installing on Debian and Ubuntu

View File

@@ -37,7 +37,7 @@ adb install bazel-bin/mediapipe/examples/android/src/java/com/google/mediapipe/a
![object_detection_android_cpu_graph](images/mobile/object_detection_android_cpu.png){width="400"}
To visualize the graph as shown above, copy the text specification of the graph
below and paste it into [MediaPipe Visualizer](https://mediapipe-viz.appspot.com/).
below and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/).
```bash
# MediaPipe graph that performs object detection with TensorFlow Lite on CPU.

View File

@@ -29,7 +29,7 @@ adb install bazel-bin/mediapipe/examples/android/src/java/com/google/mediapipe/a
![object_detection_android_gpu_graph](images/mobile/object_detection_android_gpu.png){width="400"}
To visualize the graph as shown above, copy the text specification of the graph
below and paste it into [MediaPipe Visualizer](https://mediapipe-viz.appspot.com/).
below and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev/).
```bash
# MediaPipe graph that performs object detection with TensorFlow Lite on GPU.

View File

@@ -8,7 +8,14 @@ interested in running the same TensorFlow Lite model on Android, please see the
[Object Detection on GPU on Android](object_detection_android_gpu.md) and
[Object Detection on CPU on Android](object_detection_android_cpu.md) examples.
### TensorFlow Model
We show the object detection demo with both a TensorFlow model and a TensorFlow Lite model:
- [TensorFlow Object Detection Demo](#tensorflow-object-detection-demo)
- [TensorFlow Lite Object Detection Demo](#tensorflow-lite-object-detection-demo)
Note: If MediaPipe depends on OpenCV 2, please see the [known issues with OpenCV 2](#known-issues-with-opencv-2) section.
### TensorFlow Object Detection Demo
To build and run the TensorFlow example on desktop, run:
@@ -40,7 +47,7 @@ $ bazel-bin/mediapipe/examples/desktop/object_detection/object_detection_tensorf
To visualize the graph as shown above, copy the text specification of the graph
below and paste it into
[MediaPipe Visualizer](https://mediapipe-viz.appspot.com).
[MediaPipe Visualizer](https://viz.mediapipe.dev).
```bash
# MediaPipe graph that performs object detection on desktop with TensorFlow
@@ -176,7 +183,7 @@ node {
}
```
### TensorFlow Lite Model
### TensorFlow Lite Object Detection Demo
To build and run the TensorFlow Lite example on desktop, run:
@@ -204,7 +211,7 @@ $ bazel-bin/mediapipe/examples/desktop/object_detection/object_detection_tflite
To visualize the graph as shown above, copy the text specification of the graph
below and paste it into
[MediaPipe Visualizer](https://mediapipe-viz.appspot.com).
[MediaPipe Visualizer](https://viz.mediapipe.dev).
```bash
# MediaPipe graph that performs object detection on desktop with TensorFlow Lite

View File

@@ -5,7 +5,7 @@
To help users understand the structure of their calculator graphs and to
understand the overall behavior of their machine learning inference pipelines,
we have built the [MediaPipe Visualizer](https://mediapipe-viz.appspot.com/) that is available online.
we have built the [MediaPipe Visualizer](https://viz.mediapipe.dev/) that is available online.
* A graph view allows users to see a connected calculator graph as expressed
through a graph configuration that is pasted into the graph editor or

View File

@@ -33,7 +33,7 @@ def _canonicalize_proto_path_oss(all_protos, genfile_path):
"""
proto_paths = []
proto_file_names = []
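    # Note: all_protos is a depset; direct iteration over a depset is being
    # removed in newer Bazel releases (0.27+), hence the explicit to_list()
    # conversions in this file.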
    for s in all_protos:
    for s in all_protos.to_list():
        if s.path.startswith(genfile_path):
            repo_name, _, file_name = s.path[len(genfile_path + "/external/"):].partition("/")
            proto_paths.append(genfile_path + "/external/" + repo_name)
@@ -60,7 +60,7 @@ def _encode_binary_proto_impl(ctx):
    # order of gendir before '.' is needed for the proto compiler to resolve
    # import statements that reference proto files produced by a genrule.
    ctx.actions.run_shell(
        inputs = list(all_protos) + [textpb, ctx.executable._proto_compiler],
        tools = all_protos.to_list() + [textpb, ctx.executable._proto_compiler],
        outputs = [binarypb],
        command = " ".join(
            [
@@ -110,7 +110,7 @@ def _generate_proto_descriptor_set_impl(ctx):
    # order of gendir before '.' is needed for the proto compiler to resolve
    # import statements that reference proto files produced by a genrule.
    ctx.actions.run(
        inputs = list(all_protos) + [ctx.executable._proto_compiler],
        inputs = all_protos.to_list() + [ctx.executable._proto_compiler],
        outputs = [descriptor],
        executable = ctx.executable._proto_compiler,
        arguments = [
@@ -119,7 +119,7 @@ def _generate_proto_descriptor_set_impl(ctx):
"--proto_path=" + ctx.genfiles_dir.path,
"--proto_path=.",
] +
[s.path for s in all_protos],
[s.path for s in all_protos.to_list()],
mnemonic = "GenerateProtoDescriptor",
)

View File

@@ -24,7 +24,6 @@ load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library"
proto_library(
    name = "sky_light_calculator_proto",
    srcs = ["sky_light_calculator.proto"],
    visibility = ["//mediapipe:__subpackages__"],
    deps = ["//mediapipe/framework:calculator_proto"],
)
@@ -39,7 +38,6 @@ mediapipe_cc_proto_library(
proto_library(
    name = "night_light_calculator_proto",
    srcs = ["night_light_calculator.proto"],
    visibility = ["//mediapipe:__subpackages__"],
    deps = ["//mediapipe/framework:calculator_proto"],
)
@@ -54,7 +52,6 @@ mediapipe_cc_proto_library(
proto_library(
    name = "zoo_mutator_proto",
    srcs = ["zoo_mutator.proto"],
    visibility = ["//mediapipe:__subpackages__"],
    deps = ["@protobuf_archive//:any_proto"],
)

View File

@@ -94,7 +94,7 @@ def _transitive_protos_aspect_impl(target, ctx):
    proto_libs = []
    if ctx.rule.kind == "proto_library":
        proto_libs = [f for f in target.files if f.extension == "a"]
        proto_libs = [f for f in target.files.to_list() if f.extension == "a"]

    # Searching through the hdrs attribute is necessary because of
    # portable_proto_library. In portable mode, that macro

View File

@@ -77,7 +77,8 @@ bool GlTextureBuffer::CreateInternal(const void* data) {
      // TODO: maybe we do not actually have to wait for the
      // consumer sync here. Check docs.
      sync_token->WaitOnGpu();
      if (glIsTexture(name_to_delete)) glDeleteTextures(1, &name_to_delete);
      DCHECK(glIsTexture(name_to_delete));
      glDeleteTextures(1, &name_to_delete);
    });
};