Color slider implemented with RGB vectors for changing hair color in hairsegmentationgpu in real time

This commit is contained in:
Prince Patel 2022-03-06 17:47:43 +05:30
parent e6c19885c6
commit a37c0198a8
13 changed files with 567 additions and 21 deletions

View File

@ -413,3 +413,5 @@ libedgetpu_dependencies()
load("@coral_crosstool//:configure.bzl", "cc_crosstool")
cc_crosstool(name = "crosstool")
android_sdk_repository(name = "androidsdk", build_tools_version = "30.0.2")
android_ndk_repository(name = "androidndk", api_level = 20)

View File

@ -370,6 +370,18 @@ cc_library(
alwayslink = 1,
)
cc_library(
name = "color_slider_calculator",
srcs = ["color_slider_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
],
alwayslink = 1,
)
cc_library(
name = "scale_image_utils",
srcs = ["scale_image_utils.cc"],

View File

@ -0,0 +1,87 @@
// Copyright 2019 Prince Patel
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/status.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/port/ret_check.h"
#include <vector>
// This calculator takes three int input streams carrying the red, green, and
// blue color values (0-255) and bundles them into a single array.
//
// Input streams (by index):
//   0: red value
//   1: green value
//   2: blue value
//
// Output streams:
//   RGB_OUT: A std::array<int, 3> holding the {red, green, blue} values.
//
namespace mediapipe {
class ColorSliderCalculator : public CalculatorBase {
public:
ColorSliderCalculator() = default;
~ColorSliderCalculator() override = default;
static absl::Status GetContract(CalculatorContract* cc);
absl::Status Open(CalculatorContext* cc) override;
absl::Status Process(CalculatorContext* cc) override;
void make_array(int r, int g, int b, std::array<int, 3>* out);
};
REGISTER_CALCULATOR(ColorSliderCalculator);
// static
absl::Status ColorSliderCalculator::GetContract(CalculatorContract* cc) {
cc->Inputs().Index(0).Set<int>();
cc->Inputs().Index(1).Set<int>();
cc->Inputs().Index(2).Set<int>();
if (cc->Outputs().HasTag("RGB_OUT")){
cc->Outputs().Tag("RGB_OUT").Set<std::array<int,3>>();
}
return absl::OkStatus();
}
absl::Status ColorSliderCalculator::Open(CalculatorContext* cc) {
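// A zero timestamp offset declares that each output packet carries the same
// timestamp as its inputs, so downstream calculators need not wait.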
cc->SetOffset(TimestampDiff(0));
return absl::OkStatus();
}
absl::Status ColorSliderCalculator::Process(CalculatorContext* cc) {
// Skip timestamps where any slider input is missing; calling Get<int>() on
// an empty packet would fail.
if (cc->Inputs().Index(0).Value().IsEmpty() ||
cc->Inputs().Index(1).Value().IsEmpty() ||
cc->Inputs().Index(2).Value().IsEmpty()) {
return absl::OkStatus();
}
int red_buffer = cc->Inputs().Index(0).Value().Get<int>();
int green_buffer = cc->Inputs().Index(1).Value().Get<int>();
int blue_buffer = cc->Inputs().Index(2).Value().Get<int>();
auto out = std::make_unique<std::array<int, 3>>();
make_array(red_buffer, green_buffer, blue_buffer, out.get());
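// Add() takes ownership of the released pointer and stamps the packet with
// the input timestamp.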
cc->Outputs().Tag("RGB_OUT").Add(out.release(), cc->InputTimestamp());
LOG(INFO) << "Color Slider Calculator Runner" << red_buffer << " " << green_buffer << " " << blue_buffer << "\n";
return absl::OkStatus();
}
void ColorSliderCalculator::make_array(int r, int g, int b, std::array<int, 3>* out) {
(*out)[0] = r;
(*out)[1] = g;
(*out)[2] = b;
}
} // namespace mediapipe
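
For reference, a minimal sketch of a unit test for this calculator using MediaPipe's CalculatorRunner utility; the test name and sample values are illustrative and not part of this commit:

#include <array>

#include "mediapipe/framework/calculator_runner.h"
#include "mediapipe/framework/port/gtest.h"
#include "mediapipe/framework/port/status_matchers.h"

namespace mediapipe {

TEST(ColorSliderCalculatorTest, BundlesRgbValues) {
// Node config mirrors the graph wiring: three indexed int inputs, one
// tagged output.
CalculatorRunner runner(R"pb(
calculator: "ColorSliderCalculator"
input_stream: "red"
input_stream: "green"
input_stream: "blue"
output_stream: "RGB_OUT:rgb_array"
)pb");
// Send one value per channel at the same timestamp.
runner.MutableInputs()->Index(0).packets.push_back(MakePacket<int>(200).At(Timestamp(0)));
runner.MutableInputs()->Index(1).packets.push_back(MakePacket<int>(100).At(Timestamp(0)));
runner.MutableInputs()->Index(2).packets.push_back(MakePacket<int>(50).At(Timestamp(0)));
MP_ASSERT_OK(runner.Run());
// Expect a single bundled array at the input timestamp.
const auto& out = runner.Outputs().Tag("RGB_OUT").packets[0].Get<std::array<int, 3>>();
EXPECT_EQ(out[0], 200);
EXPECT_EQ(out[1], 100);
EXPECT_EQ(out[2], 50);
}

}  // namespace mediapipe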

View File

@ -28,6 +28,7 @@
#include "mediapipe/gpu/gl_calculator_helper.h"
#include "mediapipe/gpu/gl_simple_shaders.h"
#include "mediapipe/gpu/shader_util.h"
#include "mediapipe/gpu/gpu_buffer.h"
#endif // !MEDIAPIPE_DISABLE_GPU
namespace {
@ -37,6 +38,7 @@ constexpr char kImageFrameTag[] = "IMAGE";
constexpr char kMaskCpuTag[] = "MASK";
constexpr char kGpuBufferTag[] = "IMAGE_GPU";
constexpr char kMaskGpuTag[] = "MASK_GPU";
constexpr char kRgbOutTag[] = "RGB_OUT";
inline cv::Vec3b Blend(const cv::Vec3b& color1, const cv::Vec3b& color2,
float weight, int invert_mask,
@ -69,6 +71,7 @@ namespace mediapipe {
// MASK: An ImageFrame input mask in ImageFormat::GRAY8, SRGB, SRGBA, or
// VEC32F1
// MASK_GPU: A GpuBuffer input mask, RGBA.
// RGB_OUT: A std::array<int, 3> of red, green, and blue values (0-255).
// Output:
// One of the following IMAGE tags:
// IMAGE: An ImageFrame output image.
@ -114,6 +117,7 @@ class RecolorCalculator : public CalculatorBase {
bool initialized_ = false;
std::vector<uint8> color_;
std::vector<float> my_color = {0.f, 0.f, 0.f};
mediapipe::RecolorCalculatorOptions::MaskChannel mask_channel_;
bool use_gpu_ = false;
@ -153,6 +157,10 @@ absl::Status RecolorCalculator::GetContract(CalculatorContract* cc) {
cc->Inputs().Tag(kMaskCpuTag).Set<ImageFrame>();
}
if (cc->Inputs().HasTag(kRgbOutTag)) {
cc->Inputs().Tag(kRgbOutTag).Set<std::array<int,3>>();
}
#if !MEDIAPIPE_DISABLE_GPU
if (cc->Outputs().HasTag(kGpuBufferTag)) {
cc->Outputs().Tag(kGpuBufferTag).Set<mediapipe::GpuBuffer>();
@ -308,9 +316,16 @@ absl::Status RecolorCalculator::RenderGpu(CalculatorContext* cc) {
// Get inputs and setup output.
const Packet& input_packet = cc->Inputs().Tag(kGpuBufferTag).Value();
const Packet& mask_packet = cc->Inputs().Tag(kMaskGpuTag).Value();
const Packet& rgb_packet = cc->Inputs().Tag(kRgbOutTag).Value();
const auto& input_buffer = input_packet.Get<mediapipe::GpuBuffer>();
const auto& mask_buffer = mask_packet.Get<mediapipe::GpuBuffer>();
const auto& rgb_buffer = rgb_packet.Get<std::array<int,3>>();
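// Normalize the 0-255 slider values to the 0.0-1.0 range expected by the
// vec3 "recolor" uniform; my_color is float-typed so the division does not
// truncate to zero.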
my_color[0] = rgb_buffer[0] / 255.0;
my_color[1] = rgb_buffer[1] / 255.0;
my_color[2] = rgb_buffer[2] / 255.0;
auto img_tex = gpu_helper_.CreateSourceTexture(input_buffer);
auto mask_tex = gpu_helper_.CreateSourceTexture(mask_buffer);
@ -365,6 +380,7 @@ void RecolorCalculator::GlRender() {
// program
glUseProgram(program_);
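// Re-upload the latest slider color on every frame so the rendered hair
// color tracks the UI in real time.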
glUniform3f(glGetUniformLocation(program_, "recolor"), my_color[0], my_color[1], my_color[2]);
// vertex storage
GLuint vbo[2];
@ -492,8 +508,8 @@ absl::Status RecolorCalculator::InitGpu(CalculatorContext* cc) {
glUseProgram(program_);
glUniform1i(glGetUniformLocation(program_, "frame"), 1);
glUniform1i(glGetUniformLocation(program_, "mask"), 2);
glUniform3f(glGetUniformLocation(program_, "recolor"), color_[0] / 255.0,
color_[1] / 255.0, color_[2] / 255.0);
glUniform3f(glGetUniformLocation(program_, "recolor"), my_color[0],
my_color[1], my_color[2]);
glUniform1f(glGetUniformLocation(program_, "invert_mask"),
invert_mask_ ? 1.0f : 0.0f);
glUniform1f(glGetUniformLocation(program_, "adjust_with_luminance"),

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.mediapipe.apps.hairsegmentationgpu">
<uses-sdk
android:minSdkVersion="27"
android:targetSdkVersion="31" />
<!-- For using the camera -->
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<!-- For MediaPipe -->
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
<application
android:allowBackup="true"
android:label="@string/app_name"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity
android:name=".MainActivity"
android:exported="true"
android:screenOrientation="portrait">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
licenses(["notice"])
licenses(["notice"]) # Apache 2.0
package(default_visibility = ["//visibility:private"])
@ -32,29 +32,49 @@ cc_library(
alwayslink = 1,
)
android_binary(
name = "hairsegmentationgpu",
# Maps the binary graph to an alias (e.g., the app name) for convenience so that the alias can be
# easily incorporated into the app via, for example,
# MainActivity.BINARY_GRAPH_NAME = "appname.binarypb".
genrule(
name = "binary_graph",
srcs = ["//mediapipe/graphs/hair_segmentation:mobile_gpu_binary_graph"],
outs = ["hairsegmentationgpu.binarypb"],
cmd = "cp $< $@",
)
android_library(
name = "mediapipe_lib",
srcs = glob(["*.java"]),
assets = [
"//mediapipe/graphs/hair_segmentation:mobile_gpu.binarypb",
":binary_graph",
"//mediapipe/models:hair_segmentation.tflite",
],
assets_dir = "",
manifest = "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:AndroidManifest.xml",
manifest_values = {
"applicationId": "com.google.mediapipe.apps.hairsegmentationgpu",
"appName": "Hair Segmentation",
"mainActivity": "com.google.mediapipe.apps.basic.MainActivity",
"cameraFacingFront": "True",
"binaryGraphName": "mobile_gpu.binarypb",
"inputVideoStreamName": "input_video",
"outputVideoStreamName": "output_video",
"flipFramesVertically": "True",
"converterNumBuffers": "2",
},
multidex = "native",
manifest = "AndroidManifest.xml",
resource_files = glob(["res/**"]),
deps = [
":mediapipe_jni_lib",
"//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib",
"//mediapipe/java/com/google/mediapipe/components:android_camerax_helper",
"//mediapipe/java/com/google/mediapipe/components:android_components",
"//mediapipe/java/com/google/mediapipe/framework:android_framework",
"//mediapipe/java/com/google/mediapipe/glutil",
"//third_party:androidx_appcompat",
"//third_party:androidx_constraint_layout",
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_material",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:com_google_guava_guava",
],
)
android_binary(
name = "hairsegmentationgpu",
manifest = "AndroidManifest.xml",
manifest_values = {"applicationId": "com.google.mediapipe.apps.hairsegmentationgpu"},
multidex = "native",
deps = [
":mediapipe_lib",
],
)

View File

@ -0,0 +1,284 @@
package com.google.mediapipe.apps.hairsegmentationgpu;
import android.graphics.SurfaceTexture;
import android.os.Bundle;
import android.util.Size;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.SeekBar;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.google.mediapipe.components.CameraHelper;
import com.google.mediapipe.components.CameraXPreviewHelper;
import com.google.mediapipe.components.ExternalTextureConverter;
import com.google.mediapipe.components.FrameProcessor;
import com.google.mediapipe.components.PermissionHelper;
import com.google.mediapipe.framework.AndroidAssetUtil;
import com.google.mediapipe.glutil.EglManager;
import com.google.mediapipe.framework.Packet;
/**
* Main activity of the MediaPipe hair segmentation example, with RGB seek bars for recoloring hair in real time.
*/
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
private static final String BINARY_GRAPH_NAME = "hairsegmentationgpu.binarypb";
private static final String INPUT_VIDEO_STREAM_NAME = "input_video";
private static final String OUTPUT_VIDEO_STREAM_NAME = "output_video";
private static final String RED_INPUT_STREAM = "red";
private static final String GREEN_INPUT_STREAM = "green";
private static final String BLUE_INPUT_STREAM = "blue";
private static final CameraHelper.CameraFacing CAMERA_FACING = CameraHelper.CameraFacing.FRONT;
private static final boolean FLIP_FRAMES_VERTICALLY = true;
protected int red_progress = 0;
protected int blue_progress = 0;
protected int green_progress = 0;
public Packet red_packet;
public Packet green_packet;
public Packet blue_packet;
SeekBar red_seekBar;
SeekBar green_seekBar;
SeekBar blue_seekBar;
static {
// Load all native libraries needed by the app.
System.loadLibrary("mediapipe_jni");
System.loadLibrary("opencv_java3");
}
// {@link SurfaceTexture} where the camera-preview frames can be accessed.
private SurfaceTexture previewFrameTexture;
// {@link SurfaceView} that displays the camera-preview frames processed by a MediaPipe graph.
private SurfaceView previewDisplayView;
// Creates and manages an {@link EGLContext}.
private EglManager eglManager;
// Sends camera-preview frames into a MediaPipe graph for processing, and displays the processed
// frames onto a {@link Surface}.
private FrameProcessor processor;
// Converts the GL_TEXTURE_EXTERNAL_OES texture from Android camera into a regular texture to be
// consumed by {@link FrameProcessor} and the underlying MediaPipe graph.
private ExternalTextureConverter converter;
// Handles camera access via the {@link CameraX} Jetpack support library.
private CameraXPreviewHelper cameraHelper;
private RGBHandler rgbHandler;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
red_seekBar = (SeekBar) findViewById(R.id.red_seekbar);
green_seekBar = (SeekBar) findViewById(R.id.green_seekbar);
blue_seekBar = (SeekBar) findViewById(R.id.blue_seekbar);
red_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
red_progress = i;
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
green_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
green_progress = i;
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
blue_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
blue_progress = i;
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
previewDisplayView = new SurfaceView(this);
setupPreviewDisplayView();
// Initialize the asset manager so that MediaPipe native libraries can access the app assets, e.g.,
// binary graphs.
AndroidAssetUtil.initializeNativeAssetManager(this);
eglManager = new EglManager(null);
processor =
new FrameProcessor(
this,
eglManager.getNativeContext(),
BINARY_GRAPH_NAME,
INPUT_VIDEO_STREAM_NAME,
OUTPUT_VIDEO_STREAM_NAME);
rgbHandler = new RGBHandler();
processor.setOnWillAddFrameListener(rgbHandler);
processor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY);
PermissionHelper.checkAndRequestCameraPermissions(this);
}
private class RGBHandler implements FrameProcessor.OnWillAddFrameListener {
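// Called by FrameProcessor just before each camera frame enters the graph.
// Creating the slider packets at the frame's timestamp keeps them in sync
// with the corresponding video frame; addConsumablePacketToInputStream hands
// ownership to the graph, and release() drops the local reference afterwards.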
@Override
public void onWillAddFrame(long timestamp){
red_packet = processor.getPacketCreator().createInt32(red_progress);
green_packet = processor.getPacketCreator().createInt32(green_progress);
blue_packet = processor.getPacketCreator().createInt32(blue_progress);
processor.getGraph().addConsumablePacketToInputStream(RED_INPUT_STREAM,
red_packet, timestamp);
processor.getGraph().addConsumablePacketToInputStream(GREEN_INPUT_STREAM,
green_packet, timestamp);
processor.getGraph().addConsumablePacketToInputStream(BLUE_INPUT_STREAM,
blue_packet, timestamp);
red_packet.release();
green_packet.release();
blue_packet.release();
}
}
@Override
protected void onResume() {
super.onResume();
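// Recreate the texture converter on every resume so camera frames are
// delivered to the processor on the shared EGL context.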
converter = new ExternalTextureConverter(eglManager.getContext());
converter.setFlipY(FLIP_FRAMES_VERTICALLY);
converter.setConsumer(processor);
if (PermissionHelper.cameraPermissionsGranted(this)) {
startCamera();
}
}
@Override
protected void onPause() {
super.onPause();
converter.close();
// Hide preview display until we re-open the camera again.
previewDisplayView.setVisibility(View.GONE);
}
@Override
public void onRequestPermissionsResult(
int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
protected void onCameraStarted(SurfaceTexture surfaceTexture) {
previewFrameTexture = surfaceTexture;
// Make the display view visible to start showing the preview. This triggers the
// SurfaceHolder.Callback added to (the holder of) previewDisplayView.
previewDisplayView.setVisibility(View.VISIBLE);
}
protected Size cameraTargetResolution() {
return null; // No preference and let the camera (helper) decide.
}
public void startCamera() {
cameraHelper = new CameraXPreviewHelper();
cameraHelper.setOnCameraStartedListener(
surfaceTexture -> {
onCameraStarted(surfaceTexture);
});
cameraHelper.startCamera(
this, CAMERA_FACING, /*surfaceTexture=*/ null, cameraTargetResolution());
}
protected Size computeViewSize(int width, int height) {
return new Size(width, height);
}
protected void onPreviewDisplaySurfaceChanged(
SurfaceHolder holder, int format, int width, int height) {
// (Re-)Compute the ideal size of the camera-preview display (the area that the
// camera-preview frames get rendered onto, potentially with scaling and rotation)
// based on the size of the SurfaceView that contains the display.
Size viewSize = computeViewSize(width, height);
Size displaySize = cameraHelper.computeDisplaySizeFromViewSize(viewSize);
boolean isCameraRotated = cameraHelper.isCameraRotated();
// Connect the converter to the camera-preview frames as its input (via
// previewFrameTexture), and configure the output width and height as the computed
// display size.
converter.setSurfaceTextureAndAttachToGLContext(
previewFrameTexture,
isCameraRotated ? displaySize.getHeight() : displaySize.getWidth(),
isCameraRotated ? displaySize.getWidth() : displaySize.getHeight());
}
private void setupPreviewDisplayView() {
previewDisplayView.setVisibility(View.GONE);
ViewGroup viewGroup = findViewById(R.id.preview_display_layout);
viewGroup.addView(previewDisplayView);
previewDisplayView
.getHolder()
.addCallback(
new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
processor.getVideoSurfaceOutput().setSurface(holder.getSurface());
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
onPreviewDisplaySurfaceChanged(holder, format, width, height);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
processor.getVideoSurfaceOutput().setSurface(null);
}
});
}
}

View File

@ -0,0 +1,57 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<FrameLayout
android:id="@+id/preview_display_layout"
android:layout_width="fill_parent"
android:layout_height="550dp"
tools:ignore="MissingConstraints">
<TextView
android:id="@+id/no_camera_access_view"
android:layout_width="fill_parent"
android:layout_height="match_parent"
android:gravity="center"
android:text="@string/no_camera_access" />
</FrameLayout>
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@+id/preview_display_layout">
<SeekBar
android:id="@+id/red_seekbar"
android:progress="0"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_marginTop="20dp"
android:max="255" />
<SeekBar
android:id="@+id/green_seekbar"
android:progress="0"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_marginTop="20dp"
android:layout_below="@+id/red_seekbar"
android:max="255" />
<SeekBar
android:id="@+id/blue_seekbar"
android:progress="255"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_marginTop="20dp"
android:layout_below="@+id/green_seekbar"
android:max="255" />
</RelativeLayout>
</RelativeLayout>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#008577</color>
<color name="colorPrimaryDark">#00574B</color>
<color name="colorAccent">#D81B60</color>
</resources>

View File

@ -0,0 +1,4 @@
<resources>
<string name="app_name" translatable="false">Hair Segmentation GPU</string>
<string name="no_camera_access" translatable="false">Please grant camera permissions.</string>
</resources>

View File

@ -0,0 +1,11 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>

View File

@ -25,8 +25,10 @@ cc_library(
name = "mobile_calculators",
deps = [
"//mediapipe/calculators/core:flow_limiter_calculator",
"//mediapipe/calculators/core:packet_cloner_calculator",
"//mediapipe/calculators/core:previous_loopback_calculator",
"//mediapipe/calculators/image:image_transformation_calculator",
"//mediapipe/calculators/image:color_slider_calculator",
"//mediapipe/calculators/image:recolor_calculator",
"//mediapipe/calculators/image:set_alpha_calculator",
"//mediapipe/calculators/tflite:tflite_converter_calculator",

View File

@ -4,6 +4,9 @@
# Images on GPU coming into and out of the graph.
input_stream: "input_video"
input_stream: "red"
input_stream: "green"
input_stream: "blue"
output_stream: "output_video"
# Throttles the images flowing downstream for flow control. It passes through
@ -111,7 +114,7 @@ node {
input_side_packet: "CUSTOM_OP_RESOLVER:op_resolver"
node_options: {
[type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] {
model_path: "mediapipe/models/hair_segmentation.tflite"
model_path: "hair_segmentation.tflite"
use_gpu: true
}
}
@ -136,12 +139,21 @@ node {
}
}
}
# Feature: Color slider node that bundles the user-selected red, green, and
# blue values into a single RGB array for the recolor step below.
node {
calculator: "ColorSliderCalculator"
input_stream: "red"
input_stream: "green"
input_stream: "blue"
output_stream: "RGB_OUT:rgb_array"
}
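# The rgb_array packets arrive timestamp-aligned with the camera frames: the
# app injects the slider values at each frame's timestamp and
# ColorSliderCalculator declares a zero timestamp offset.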
# Colors the hair segmentation with the color received on the RGB_OUT input stream.
node {
calculator: "RecolorCalculator"
input_stream: "IMAGE_GPU:throttled_input_video"
input_stream: "MASK_GPU:hair_mask"
input_stream: "RGB_OUT:rgb_array"
output_stream: "IMAGE_GPU:output_video"
node_options: {
[type.googleapis.com/mediapipe.RecolorCalculatorOptions] {