From a37c0198a87238938ed597111c04298077873a72 Mon Sep 17 00:00:00 2001 From: Prince Patel Date: Sun, 6 Mar 2022 17:47:43 +0530 Subject: [PATCH] Color slider implemented with RGB vectors for changing hair color in hairsegmentationgpu in real-time --- WORKSPACE | 2 + mediapipe/calculators/image/BUILD | 12 + .../image/color_slider_calculator.cc | 87 ++++++ .../calculators/image/recolor_calculator.cc | 20 +- .../hairsegmentationgpu/AndroidManifest.xml | 33 ++ .../mediapipe/apps/hairsegmentationgpu/BUILD | 56 ++-- .../hairsegmentationgpu/MainActivity.java | 284 ++++++++++++++++++ .../res/layout/activity_main.xml | 57 ++++ .../hairsegmentationgpu/res/values/colors.xml | 6 + .../res/values/strings.xml | 4 + .../hairsegmentationgpu/res/values/styles.xml | 11 + mediapipe/graphs/hair_segmentation/BUILD | 2 + .../hair_segmentation_mobile_gpu.pbtxt | 14 +- 13 files changed, 567 insertions(+), 21 deletions(-) create mode 100644 mediapipe/calculators/image/color_slider_calculator.cc create mode 100644 mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/AndroidManifest.xml create mode 100644 mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/MainActivity.java create mode 100644 mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/layout/activity_main.xml create mode 100644 mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/colors.xml create mode 100644 mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/strings.xml create mode 100644 mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/styles.xml diff --git a/WORKSPACE b/WORKSPACE index 633169032..8442b3e82 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -413,3 +413,5 @@ libedgetpu_dependencies() load("@coral_crosstool//:configure.bzl", "cc_crosstool") cc_crosstool(name = "crosstool") +android_sdk_repository(name = 
"androidsdk", build_tools_version = "30.0.2") +android_ndk_repository(name = "androidndk", api_level = 20) diff --git a/mediapipe/calculators/image/BUILD b/mediapipe/calculators/image/BUILD index 0bbfadd05..bb2f9a964 100644 --- a/mediapipe/calculators/image/BUILD +++ b/mediapipe/calculators/image/BUILD @@ -370,6 +370,18 @@ cc_library( alwayslink = 1, ) +cc_library( + name = "color_slider_calculator", + srcs = ["color_slider_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + ], + alwayslink = 1, +) + cc_library( name = "scale_image_utils", srcs = ["scale_image_utils.cc"], diff --git a/mediapipe/calculators/image/color_slider_calculator.cc b/mediapipe/calculators/image/color_slider_calculator.cc new file mode 100644 index 000000000..4ff4f63c6 --- /dev/null +++ b/mediapipe/calculators/image/color_slider_calculator.cc @@ -0,0 +1,87 @@ +// Copyright 2019 Prince Patel +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ + +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/port/status.h" +#include "mediapipe/framework/formats/image_frame.h" +#include "mediapipe/framework/port/ret_check.h" +#include <array> + + +// This calculator supports an input stream with Color values of Red, Green, and Blue +// TODO: Remove this requirement by replacing the typed input streams +// +// input_stream: "Red" +// input_stream: "Green" +// input_stream: "Blue" +// +// Output streams: +// RGB_OUT: The output video stream with array of values. +// + + +namespace mediapipe { + +class ColorSliderCalculator : public CalculatorBase { + public: + ColorSliderCalculator() = default; + ~ColorSliderCalculator() override = default; + + static absl::Status GetContract(CalculatorContract* cc); + absl::Status Open(CalculatorContext* cc) override; + absl::Status Process(CalculatorContext* cc) override; + void make_array(int r,int g,int b,std::array<int, 3>* out); +}; +REGISTER_CALCULATOR(ColorSliderCalculator); + +//static +absl::Status ColorSliderCalculator::GetContract (CalculatorContract *cc){ + cc->Inputs().Index(0).Set<int>(); + cc->Inputs().Index(1).Set<int>(); + cc->Inputs().Index(2).Set<int>(); + + if (cc->Outputs().HasTag("RGB_OUT")){ + cc->Outputs().Tag("RGB_OUT").Set<std::array<int, 3>>(); + } + return absl::OkStatus(); +} + +absl::Status ColorSliderCalculator::Open(CalculatorContext* cc) { + cc->SetOffset(TimestampDiff(0)); + return absl::OkStatus(); +} + +absl::Status ColorSliderCalculator::Process(CalculatorContext* cc) { + if (cc->Inputs().NumEntries() == 0) { + return tool::StatusStop(); + } + int red_buffer = cc->Inputs().Index(0).Value().Get<int>(); + int green_buffer = cc->Inputs().Index(1).Value().Get<int>(); + int blue_buffer = cc->Inputs().Index(2).Value().Get<int>(); + auto out = absl::make_unique<std::array<int, 3>>(); + make_array(red_buffer,green_buffer,blue_buffer, out.get()); + + cc->Outputs().Tag("RGB_OUT").Add(out.release(), cc->InputTimestamp()); + LOG(INFO) << "Color Slider Calculator Runner" << red_buffer << " " << green_buffer
<< " " << blue_buffer << "\n"; + return absl::OkStatus(); +} + +void ColorSliderCalculator::make_array(int r,int g,int b,std::array<int, 3>* out){ + (*out)[0] =r; + (*out)[1] =g; + (*out)[2] =b; +} + +} // namespace mediapipe diff --git a/mediapipe/calculators/image/recolor_calculator.cc b/mediapipe/calculators/image/recolor_calculator.cc index 062fb2cb3..0178a4469 100644 --- a/mediapipe/calculators/image/recolor_calculator.cc +++ b/mediapipe/calculators/image/recolor_calculator.cc @@ -28,6 +28,7 @@ #include "mediapipe/gpu/gl_calculator_helper.h" #include "mediapipe/gpu/gl_simple_shaders.h" #include "mediapipe/gpu/shader_util.h" +#include "mediapipe/gpu/gpu_buffer.h" #endif // !MEDIAPIPE_DISABLE_GPU namespace { @@ -37,6 +38,7 @@ constexpr char kImageFrameTag[] = "IMAGE"; constexpr char kMaskCpuTag[] = "MASK"; constexpr char kGpuBufferTag[] = "IMAGE_GPU"; constexpr char kMaskGpuTag[] = "MASK_GPU"; +constexpr char kRgbOutTag[] = "RGB_OUT"; inline cv::Vec3b Blend(const cv::Vec3b& color1, const cv::Vec3b& color2, float weight, int invert_mask, @@ -69,6 +71,7 @@ namespace mediapipe { // MASK: An ImageFrame input mask in ImageFormat::GRAY8, SRGB, SRGBA, or // VEC32F1 // MASK_GPU: A GpuBuffer input mask, RGBA. +// RGB_OUT: A vector of RGB values // Output: // One of the following IMAGE tags: // IMAGE: An ImageFrame output image.
@@ -114,6 +117,7 @@ class RecolorCalculator : public CalculatorBase { bool initialized_ = false; std::vector<uint8_t> color_; + std::vector<float> my_color = {0,0,0}; mediapipe::RecolorCalculatorOptions::MaskChannel mask_channel_; bool use_gpu_ = false; @@ -153,6 +157,10 @@ absl::Status RecolorCalculator::GetContract(CalculatorContract* cc) { cc->Inputs().Tag(kMaskCpuTag).Set<ImageFrame>(); } + if (cc->Inputs().HasTag(kRgbOutTag)) { + cc->Inputs().Tag(kRgbOutTag).Set<std::array<int, 3>>(); + } + #if !MEDIAPIPE_DISABLE_GPU if (cc->Outputs().HasTag(kGpuBufferTag)) { cc->Outputs().Tag(kGpuBufferTag).Set<mediapipe::GpuBuffer>(); @@ -308,9 +316,16 @@ absl::Status RecolorCalculator::RenderGpu(CalculatorContext* cc) { // Get inputs and setup output. const Packet& input_packet = cc->Inputs().Tag(kGpuBufferTag).Value(); const Packet& mask_packet = cc->Inputs().Tag(kMaskGpuTag).Value(); + const Packet& rgb_packet = cc->Inputs().Tag(kRgbOutTag).Value(); const auto& input_buffer = input_packet.Get<mediapipe::GpuBuffer>(); const auto& mask_buffer = mask_packet.Get<mediapipe::GpuBuffer>(); + const auto& rgb_buffer = rgb_packet.Get<std::array<int, 3>>(); + + my_color[0] = rgb_buffer[0] / 255.0; + my_color[1] = rgb_buffer[1] / 255.0; + my_color[2] = rgb_buffer[2] / 255.0; + auto img_tex = gpu_helper_.CreateSourceTexture(input_buffer); auto mask_tex = gpu_helper_.CreateSourceTexture(mask_buffer); @@ -365,6 +380,7 @@ void RecolorCalculator::GlRender() { // program glUseProgram(program_); + glUniform3f(glGetUniformLocation(program_, "recolor"), my_color[0], my_color[1], my_color[2]); // vertex storage GLuint vbo[2]; @@ -492,8 +508,8 @@ absl::Status RecolorCalculator::InitGpu(CalculatorContext* cc) { glUseProgram(program_); glUniform1i(glGetUniformLocation(program_, "frame"), 1); glUniform1i(glGetUniformLocation(program_, "mask"), 2); - glUniform3f(glGetUniformLocation(program_, "recolor"), color_[0] / 255.0, - color_[1] / 255.0, color_[2] / 255.0); + glUniform3f(glGetUniformLocation(program_, "recolor"), my_color[0], + my_color[1], my_color[2]); glUniform1f(glGetUniformLocation(program_, "invert_mask"),
invert_mask_ ? 1.0f : 0.0f); glUniform1f(glGetUniformLocation(program_, "adjust_with_luminance"), diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/AndroidManifest.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/AndroidManifest.xml new file mode 100644 index 000000000..d3c232797 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/AndroidManifest.xml @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD index df58f2713..5847e4301 100644 --- a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/BUILD @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -licenses(["notice"]) +licenses(["notice"]) # Apache 2.0 package(default_visibility = ["//visibility:private"]) @@ -32,29 +32,49 @@ cc_library( alwayslink = 1, ) -android_binary( - name = "hairsegmentationgpu", +# Maps the binary graph to an alias (e.g., the app name) for convenience so that the alias can be +# easily incorporated into the app via, for example, +# MainActivity.BINARY_GRAPH_NAME = "appname.binarypb". 
+genrule( + name = "binary_graph", + srcs = ["//mediapipe/graphs/hair_segmentation:mobile_gpu_binary_graph"], + outs = ["hairsegmentationgpu.binarypb"], + cmd = "cp $< $@", +) + +android_library( + name = "mediapipe_lib", srcs = glob(["*.java"]), assets = [ - "//mediapipe/graphs/hair_segmentation:mobile_gpu.binarypb", + ":binary_graph", "//mediapipe/models:hair_segmentation.tflite", ], assets_dir = "", - manifest = "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:AndroidManifest.xml", - manifest_values = { - "applicationId": "com.google.mediapipe.apps.hairsegmentationgpu", - "appName": "Hair Segmentation", - "mainActivity": "com.google.mediapipe.apps.basic.MainActivity", - "cameraFacingFront": "True", - "binaryGraphName": "mobile_gpu.binarypb", - "inputVideoStreamName": "input_video", - "outputVideoStreamName": "output_video", - "flipFramesVertically": "True", - "converterNumBuffers": "2", - }, - multidex = "native", + manifest = "AndroidManifest.xml", + resource_files = glob(["res/**"]), deps = [ ":mediapipe_jni_lib", - "//mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:basic_lib", + "//mediapipe/java/com/google/mediapipe/components:android_camerax_helper", + "//mediapipe/java/com/google/mediapipe/components:android_components", + "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//mediapipe/java/com/google/mediapipe/glutil", + "//third_party:androidx_appcompat", + "//third_party:androidx_constraint_layout", + "//third_party:androidx_legacy_support_v4", + "//third_party:androidx_material", + "//third_party:androidx_recyclerview", + "//third_party:opencv", + "@maven//:androidx_concurrent_concurrent_futures", + "@maven//:com_google_guava_guava", + ], +) + +android_binary( + name = "hairsegmentationgpu", + manifest = "AndroidManifest.xml", + manifest_values = {"applicationId": "com.google.mediapipe.apps.hairsegmentationgpu"}, + multidex = "native", + deps = [ + ":mediapipe_lib", ], ) diff --git 
a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/MainActivity.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/MainActivity.java new file mode 100644 index 000000000..6c5cffe13 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/MainActivity.java @@ -0,0 +1,284 @@ +package com.google.mediapipe.apps.hairsegmentationgpu; + +import android.graphics.SurfaceTexture; +import android.os.Bundle; +import android.util.Size; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.View; +import android.view.ViewGroup; +import android.widget.SeekBar; +import android.widget.Toast; + +import androidx.appcompat.app.AppCompatActivity; + +import com.google.mediapipe.components.CameraHelper; +import com.google.mediapipe.components.CameraXPreviewHelper; +import com.google.mediapipe.components.ExternalTextureConverter; + + +import com.google.mediapipe.components.FrameProcessor; +import com.google.mediapipe.components.PermissionHelper; +import com.google.mediapipe.framework.AndroidAssetUtil; +import com.google.mediapipe.glutil.EglManager; +import com.google.mediapipe.framework.Packet; + + + +/** + * Main activity of MediaPipe example apps. 
+ */ +public class MainActivity extends AppCompatActivity { + private static final String TAG = "MainActivity"; + + private static final String BINARY_GRAPH_NAME = "hairsegmentationgpu.binarypb"; + private static final String INPUT_VIDEO_STREAM_NAME = "input_video"; + private static final String OUTPUT_VIDEO_STREAM_NAME = "output_video"; + private static final String RED_INPUT_STREAM ="red"; + private static final String GREEN_INPUT_STREAM ="green"; + private static final String BLUE_INPUT_STREAM ="blue"; + + private static final CameraHelper.CameraFacing CAMERA_FACING = CameraHelper.CameraFacing.FRONT; + private static final boolean FLIP_FRAMES_VERTICALLY = true; + + protected int red_progress = 0; + protected int blue_progress = 0; + protected int green_progress = 0; + public Packet red_packet; + public Packet green_packet; + public Packet blue_packet; + SeekBar red_seekBar; + SeekBar green_seekBar; + SeekBar blue_seekBar; + static { + // Load all native libraries needed by the app. + System.loadLibrary("mediapipe_jni"); + System.loadLibrary("opencv_java3"); + } + + // {@link SurfaceTexture} where the camera-preview frames can be accessed. + private SurfaceTexture previewFrameTexture; + // {@link SurfaceView} that displays the camera-preview frames processed by a MediaPipe graph. + private SurfaceView previewDisplayView; + + // Creates and manages an {@link EGLContext}. + private EglManager eglManager; + // Sends camera-preview frames into a MediaPipe graph for processing, and displays the processed + // frames onto a {@link Surface}. + private FrameProcessor processor; + // Converts the GL_TEXTURE_EXTERNAL_OES texture from Android camera into a regular texture to be + // consumed by {@link FrameProcessor} and the underlying MediaPipe graph. + private ExternalTextureConverter converter; + + // Handles camera access via the {@link CameraX} Jetpack support library. 
+ private CameraXPreviewHelper cameraHelper; + + private RGBHandler rgbHandler; + + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_main); + red_seekBar = (SeekBar) findViewById(R.id.red_seekbar); + blue_seekBar = (SeekBar) findViewById(R.id.blue_seekbar); + green_seekBar = (SeekBar) findViewById(R.id.green_seekbar); + + red_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { + + @Override + public void onProgressChanged(SeekBar seekBar, int i, boolean b) { + + red_progress = i; + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + + } + }); + + green_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { + + @Override + public void onProgressChanged(SeekBar seekBar, int i, boolean b) { + green_progress = i; + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + + } + }); + blue_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { + + @Override + public void onProgressChanged(SeekBar seekBar, int i, boolean b) { + blue_progress= i; + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + + } + }); + + + previewDisplayView = new SurfaceView(this); + setupPreviewDisplayView(); + // Initialize asset manager so that MediaPipe native libraries can access the app assets, e.g., + // binary graphs.
+ AndroidAssetUtil.initializeNativeAssetManager(this); + eglManager = new EglManager(null); + processor = + new FrameProcessor( + this, + eglManager.getNativeContext(), + BINARY_GRAPH_NAME, + INPUT_VIDEO_STREAM_NAME, + OUTPUT_VIDEO_STREAM_NAME); + + rgbHandler = new RGBHandler(); + processor.setOnWillAddFrameListener(rgbHandler); + + processor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY); + PermissionHelper.checkAndRequestCameraPermissions(this); + + } + + private class RGBHandler implements FrameProcessor.OnWillAddFrameListener + { + @Override + public void onWillAddFrame(long timestamp){ + + red_packet = processor.getPacketCreator().createInt32(red_progress); + green_packet = processor.getPacketCreator().createInt32(green_progress); + blue_packet = processor.getPacketCreator().createInt32(blue_progress); + processor.getGraph().addConsumablePacketToInputStream(RED_INPUT_STREAM, + red_packet, timestamp); + processor.getGraph().addConsumablePacketToInputStream(GREEN_INPUT_STREAM, + green_packet, timestamp); + processor.getGraph().addConsumablePacketToInputStream(BLUE_INPUT_STREAM, + blue_packet, timestamp); + red_packet.release(); + green_packet.release(); + blue_packet.release(); + + } + } + + + //new + @Override + protected void onResume() { + super.onResume(); + converter = new ExternalTextureConverter(eglManager.getContext()); + converter.setFlipY(FLIP_FRAMES_VERTICALLY); + converter.setConsumer(processor); + if (PermissionHelper.cameraPermissionsGranted(this)) { + startCamera(); + } + } + + @Override + protected void onPause() { + super.onPause(); + converter.close(); + + // Hide preview display until we re-open the camera again. 
+ previewDisplayView.setVisibility(View.GONE); + } + + @Override + public void onRequestPermissionsResult( + int requestCode, String[] permissions, int[] grantResults) { + super.onRequestPermissionsResult(requestCode, permissions, grantResults); + PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults); + } + + protected void onCameraStarted(SurfaceTexture surfaceTexture) { + previewFrameTexture = surfaceTexture; + // Make the display view visible to start showing the preview. This triggers the + // SurfaceHolder.Callback added to (the holder of) previewDisplayView. + previewDisplayView.setVisibility(View.VISIBLE); + } + + protected Size cameraTargetResolution() { + return null; // No preference and let the camera (helper) decide. + } + + public void startCamera() { + cameraHelper = new CameraXPreviewHelper(); + cameraHelper.setOnCameraStartedListener( + surfaceTexture -> { + onCameraStarted(surfaceTexture); + }); + CameraHelper.CameraFacing cameraFacing = CameraHelper.CameraFacing.FRONT; + cameraHelper.startCamera( + this, cameraFacing, /*surfaceTexture=*/ null, cameraTargetResolution()); + } + + protected Size computeViewSize(int width, int height) { + return new Size(width, height); + } + + protected void onPreviewDisplaySurfaceChanged( + SurfaceHolder holder, int format, int width, int height) { + // (Re-)Compute the ideal size of the camera-preview display (the area that the + // camera-preview frames get rendered onto, potentially with scaling and rotation) + // based on the size of the SurfaceView that contains the display. + Size viewSize = computeViewSize(width, height); + Size displaySize = cameraHelper.computeDisplaySizeFromViewSize(viewSize); + boolean isCameraRotated = cameraHelper.isCameraRotated(); + + // Connect the converter to the camera-preview frames as its input (via + // previewFrameTexture), and configure the output width and height as the computed + // display size. 
+ converter.setSurfaceTextureAndAttachToGLContext( + previewFrameTexture, + isCameraRotated ? displaySize.getHeight() : displaySize.getWidth(), + isCameraRotated ? displaySize.getWidth() : displaySize.getHeight()); + } + + private void setupPreviewDisplayView() { + previewDisplayView.setVisibility(View.GONE); + ViewGroup viewGroup = findViewById(R.id.preview_display_layout); + viewGroup.addView(previewDisplayView); + + previewDisplayView + .getHolder() + .addCallback( + new SurfaceHolder.Callback() { + @Override + public void surfaceCreated(SurfaceHolder holder) { + processor.getVideoSurfaceOutput().setSurface(holder.getSurface()); + } + + @Override + public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { + onPreviewDisplaySurfaceChanged(holder, format, width, height); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + processor.getVideoSurfaceOutput().setSurface(null); + } + }); + } +} + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/layout/activity_main.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/layout/activity_main.xml new file mode 100644 index 000000000..e871a8edb --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/layout/activity_main.xml @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/colors.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/colors.xml new file mode 100644 index 000000000..69b22338c --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/colors.xml @@ -0,0 +1,6 @@ + + + #008577 + #00574B + #D81B60 + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/strings.xml 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/strings.xml new file mode 100644 index 000000000..41cce9899 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/strings.xml @@ -0,0 +1,4 @@ + + Hair Segmentation GPU + Please grant camera permissions. + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/styles.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/styles.xml new file mode 100644 index 000000000..5885930df --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/hairsegmentationgpu/res/values/styles.xml @@ -0,0 +1,11 @@ + + + + + + diff --git a/mediapipe/graphs/hair_segmentation/BUILD b/mediapipe/graphs/hair_segmentation/BUILD index b177726bf..0d2ba92e1 100644 --- a/mediapipe/graphs/hair_segmentation/BUILD +++ b/mediapipe/graphs/hair_segmentation/BUILD @@ -25,8 +25,10 @@ cc_library( name = "mobile_calculators", deps = [ "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:packet_cloner_calculator", "//mediapipe/calculators/core:previous_loopback_calculator", "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/image:color_slider_calculator", "//mediapipe/calculators/image:recolor_calculator", "//mediapipe/calculators/image:set_alpha_calculator", "//mediapipe/calculators/tflite:tflite_converter_calculator", diff --git a/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt b/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt index c8db44d40..f349680ba 100644 --- a/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt +++ b/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt @@ -4,6 +4,9 @@ # Images on GPU coming into and out of the graph. 
input_stream: "input_video" +input_stream: "red" +input_stream: "green" +input_stream: "blue" output_stream: "output_video" # Throttles the images flowing downstream for flow control. It passes through @@ -111,7 +114,7 @@ node { input_side_packet: "CUSTOM_OP_RESOLVER:op_resolver" node_options: { [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] { - model_path: "mediapipe/models/hair_segmentation.tflite" + model_path: "hair_segmentation.tflite" use_gpu: true } } @@ -136,12 +139,21 @@ node { } } } +# Feature : Color slider for sliding any color values(R:, G:, B:) +node{ + calculator: "ColorSliderCalculator" + input_stream: "red" + input_stream: "green" + input_stream: "blue" + output_stream: "RGB_OUT:rgb_array" +} # Colors the hair segmentation with the color specified in the option. node { calculator: "RecolorCalculator" input_stream: "IMAGE_GPU:throttled_input_video" input_stream: "MASK_GPU:hair_mask" + input_stream: "RGB_OUT:rgb_array" output_stream: "IMAGE_GPU:output_video" node_options: { [type.googleapis.com/mediapipe.RecolorCalculatorOptions] {