Merged with master

commit 1fa737cc5d
Author: Prianka Liz Kariat
Date:   2023-05-29 22:42:37 +05:30

43 changed files with 285 additions and 610 deletions

View File

@@ -1,4 +1,4 @@
-# Copyright 2022 The MediaPipe Authors.
+# Copyright 2023 The MediaPipe Authors.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

View File

@@ -1,4 +1,4 @@
-# Copyright 2022 The MediaPipe Authors.
+# Copyright 2023 The MediaPipe Authors.
 #
 # Licensed under the Apache License, Version 2.0 (the 'License');
 # you may not use this file except in compliance with the License.

View File

@@ -71,23 +71,21 @@ constexpr char kFaceLandmarksDetectorTFLiteName[] =
     "face_landmarks_detector.tflite";
 constexpr char kFaceStylizerTFLiteName[] = "face_stylizer.tflite";
 constexpr char kImageTag[] = "IMAGE";
-constexpr char kImageCpuTag[] = "IMAGE_CPU";
-constexpr char kImageGpuTag[] = "IMAGE_GPU";
 constexpr char kImageSizeTag[] = "IMAGE_SIZE";
 constexpr char kMatrixTag[] = "MATRIX";
 constexpr char kNormLandmarksTag[] = "NORM_LANDMARKS";
 constexpr char kNormRectTag[] = "NORM_RECT";
-constexpr char kOutputSizeTag[] = "OUTPUT_SIZE";
 constexpr char kSizeTag[] = "SIZE";
 constexpr char kStylizedImageTag[] = "STYLIZED_IMAGE";
 constexpr char kTensorsTag[] = "TENSORS";
-constexpr int kFaceAlignmentOutputSize = 256;
+constexpr char kTransformationMatrixTag[] = "TRANSFORMATION_MATRIX";

 // Struct holding the different output streams produced by the face stylizer
 // graph.
 struct FaceStylizerOutputStreams {
   std::optional<Source<Image>> stylized_image;
   std::optional<Source<Image>> face_alignment_image;
+  std::optional<Source<std::array<float, 16>>> transformation_matrix;
   Source<Image> original_image;
 };
@@ -202,6 +200,10 @@ void ConfigureTensorsToImageCalculator(
 //   The aligned face image that is fed to the face stylization model to
 //   perform stylization. Also useful for preparing face stylization training
 //   data.
+// TRANSFORMATION_MATRIX - std::array<float,16>
+//   An std::array<float, 16> representing a 4x4 row-major-order matrix that
+//   maps a point on the input image to a point on the output image, and
+//   can be used to reverse the mapping by inverting the matrix.
 // IMAGE - mediapipe::Image
 //   The input image that the face landmarker runs on and has the pixel data
 //   stored on the target storage (CPU vs GPU).
@@ -280,6 +282,8 @@ class FaceStylizerGraph : public core::ModelTaskGraph {
       output_streams.face_alignment_image.value() >>
           graph[Output<Image>(kFaceAlignmentTag)];
     }
+    output_streams.transformation_matrix.value() >>
+        graph[Output<std::array<float, 16>>(kTransformationMatrixTag)];
     output_streams.original_image >> graph[Output<Image>(kImageTag)];
     return graph.GetConfig();
   }
@@ -357,9 +361,10 @@ class FaceStylizerGraph : public core::ModelTaskGraph {
         image_to_tensor.GetOptions<ImageToTensorCalculatorOptions>();
     image_to_tensor_options.mutable_output_tensor_float_range()->set_min(0);
     image_to_tensor_options.mutable_output_tensor_float_range()->set_max(1);
-    image_to_tensor_options.set_output_tensor_width(kFaceAlignmentOutputSize);
-    image_to_tensor_options.set_output_tensor_height(
-        kFaceAlignmentOutputSize);
+    image_to_tensor_options.set_output_tensor_width(
+        task_options.face_alignment_size());
+    image_to_tensor_options.set_output_tensor_height(
+        task_options.face_alignment_size());
     image_to_tensor_options.set_keep_aspect_ratio(true);
     image_to_tensor_options.set_border_mode(
         mediapipe::ImageToTensorCalculatorOptions::BORDER_ZERO);
@@ -378,6 +383,8 @@ class FaceStylizerGraph : public core::ModelTaskGraph {
     return {{/*stylized_image=*/std::nullopt,
              /*alignment_image=*/face_alignment,
+             /*transformation_matrix=*/
+             image_to_tensor.Out(kMatrixTag).Cast<std::array<float, 16>>(),
              /*original_image=*/pass_through.Out("").Cast<Image>()}};
   }
@@ -439,6 +446,8 @@ class FaceStylizerGraph : public core::ModelTaskGraph {
     return {{/*stylized_image=*/stylized,
              /*alignment_image=*/face_alignment,
+             /*transformation_matrix=*/
+             preprocessing.Out(kMatrixTag).Cast<std::array<float, 16>>(),
              /*original_image=*/preprocessing.Out(kImageTag).Cast<Image>()}};
   }
 };
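For reference, a minimal sketch (not part of the commit) of how a caller might consume the new TRANSFORMATION_MATRIX output; the helper name and the homogeneous-point convention (x, y, 0, 1) are assumptions based on the row-major 4x4 layout documented above.

#include <array>
#include <utility>

// Maps a point (x, y) on the input image to the output image using the
// 4x4 row-major matrix from the TRANSFORMATION_MATRIX stream. The point is
// treated as the homogeneous vector (x, y, 0, 1), so only the first two
// rows of the matrix contribute to the result.
std::pair<float, float> MapPoint(const std::array<float, 16>& m, float x,
                                 float y) {
  // Row-major layout: m[r * 4 + c] is the element at row r, column c.
  const float out_x = m[0] * x + m[1] * y + m[3];
  const float out_y = m[4] * x + m[5] * y + m[7];
  return {out_x, out_y};
}

Inverting the matrix with a standard 4x4 inverse reverses the mapping, as the graph documentation notes.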

View File

@@ -36,4 +36,7 @@ message FaceStylizerGraphOptions {
   // Options for face landmarker graph.
   optional vision.face_landmarker.proto.FaceLandmarkerGraphOptions
       face_landmarker_graph_options = 2;
+
+  // The width and height of the output face alignment images.
+  optional int32 face_alignment_size = 3 [default = 256];
 }
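A usage sketch for the new option (not part of the commit); the include path and namespace are inferred from this package's layout and should be treated as assumptions.

#include "mediapipe/tasks/cc/vision/face_stylizer/proto/face_stylizer_graph_options.pb.h"

// Requests 512x512 face alignment images instead of the 256x256 default;
// leaving the field unset keeps the previous hard-coded size of 256.
void ConfigureAlignmentSize(
    mediapipe::tasks::vision::face_stylizer::proto::FaceStylizerGraphOptions&
        options) {
  options.set_face_alignment_size(512);
}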

View File

@@ -1,37 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.mediapipe.tasks.examples.objectdetector">
<uses-sdk
android:minSdkVersion="28"
android:targetSdkVersion="30" />
<!-- For loading images from gallery -->
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<!-- For using the camera -->
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<!-- For logging solution events -->
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="MediaPipe Tasks Object Detector"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme"
android:exported="false">
<activity android:name=".MainActivity"
android:screenOrientation="portrait"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@@ -1,49 +0,0 @@
# Copyright 2022 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
android_binary(
name = "objectdetector",
srcs = glob(["**/*.java"]),
assets = [
"//mediapipe/tasks/testdata/vision:test_models",
],
assets_dir = "",
custom_package = "com.google.mediapipe.tasks.examples.objectdetector",
manifest = "AndroidManifest.xml",
manifest_values = {
"applicationId": "com.google.mediapipe.tasks.examples.objectdetector",
},
multidex = "native",
resource_files = ["//mediapipe/tasks/examples/android:resource_files"],
deps = [
"//mediapipe/java/com/google/mediapipe/framework:android_framework",
"//mediapipe/java/com/google/mediapipe/framework/image",
"//mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers:detection",
"//mediapipe/tasks/java/com/google/mediapipe/tasks/core",
"//mediapipe/tasks/java/com/google/mediapipe/tasks/vision:core",
"//mediapipe/tasks/java/com/google/mediapipe/tasks/vision:objectdetector",
"//third_party:androidx_appcompat",
"//third_party:androidx_constraint_layout",
"//third_party:opencv",
"@maven//:androidx_activity_activity",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_exifinterface_exifinterface",
"@maven//:androidx_fragment_fragment",
"@maven//:com_google_guava_guava",
],
)

View File

@@ -1,239 +0,0 @@
// Copyright 2022 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.tasks.examples.objectdetector;
import android.content.Intent;
import android.graphics.Bitmap;
import android.media.MediaMetadataRetriever;
import android.os.Bundle;
import android.provider.MediaStore;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.FrameLayout;
import androidx.activity.result.ActivityResultLauncher;
import androidx.activity.result.contract.ActivityResultContracts;
import androidx.exifinterface.media.ExifInterface;
// ContentResolver dependency
import com.google.mediapipe.framework.MediaPipeException;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.core.BaseOptions;
import com.google.mediapipe.tasks.vision.core.ImageProcessingOptions;
import com.google.mediapipe.tasks.vision.core.RunningMode;
import com.google.mediapipe.tasks.vision.objectdetector.ObjectDetectionResult;
import com.google.mediapipe.tasks.vision.objectdetector.ObjectDetector;
import com.google.mediapipe.tasks.vision.objectdetector.ObjectDetector.ObjectDetectorOptions;
import java.io.IOException;
import java.io.InputStream;
/** Main activity of MediaPipe Task Object Detector reference app. */
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
private static final String MODEL_FILE = "coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.tflite";
private ObjectDetector objectDetector;
private enum InputSource {
UNKNOWN,
IMAGE,
VIDEO,
CAMERA,
}
private InputSource inputSource = InputSource.UNKNOWN;
// Image mode demo component.
private ActivityResultLauncher<Intent> imageGetter;
// Video mode demo component.
private ActivityResultLauncher<Intent> videoGetter;
private ObjectDetectionResultImageView imageView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setupImageModeDemo();
setupVideoModeDemo();
// TODO: Adds live camera demo.
}
/** Sets up the image mode demo. */
private void setupImageModeDemo() {
imageView = new ObjectDetectionResultImageView(this);
// The Intent to access gallery and read images as bitmap.
imageGetter =
registerForActivityResult(
new ActivityResultContracts.StartActivityForResult(),
result -> {
Intent resultIntent = result.getData();
if (resultIntent != null) {
if (result.getResultCode() == RESULT_OK) {
Bitmap bitmap = null;
int rotation = 0;
try {
bitmap =
downscaleBitmap(
MediaStore.Images.Media.getBitmap(
this.getContentResolver(), resultIntent.getData()));
} catch (IOException e) {
Log.e(TAG, "Bitmap reading error:" + e);
}
try {
InputStream imageData =
this.getContentResolver().openInputStream(resultIntent.getData());
rotation = getImageRotation(imageData);
} catch (IOException | MediaPipeException e) {
Log.e(TAG, "Bitmap rotation error:" + e);
}
if (bitmap != null) {
MPImage image = new BitmapImageBuilder(bitmap).build();
ObjectDetectionResult detectionResult =
objectDetector.detect(
image,
ImageProcessingOptions.builder().setRotationDegrees(rotation).build());
imageView.setData(image, detectionResult);
runOnUiThread(() -> imageView.update());
}
}
}
});
Button loadImageButton = findViewById(R.id.button_load_picture);
loadImageButton.setOnClickListener(
v -> {
if (inputSource != InputSource.IMAGE) {
createObjectDetector(RunningMode.IMAGE);
this.inputSource = InputSource.IMAGE;
updateLayout();
}
// Reads images from gallery.
Intent pickImageIntent = new Intent(Intent.ACTION_PICK);
pickImageIntent.setDataAndType(MediaStore.Images.Media.INTERNAL_CONTENT_URI, "image/*");
imageGetter.launch(pickImageIntent);
});
}
/** Sets up the video mode demo. */
private void setupVideoModeDemo() {
imageView = new ObjectDetectionResultImageView(this);
// The Intent to access gallery and read a video file.
videoGetter =
registerForActivityResult(
new ActivityResultContracts.StartActivityForResult(),
result -> {
Intent resultIntent = result.getData();
if (resultIntent != null) {
if (result.getResultCode() == RESULT_OK) {
MediaMetadataRetriever metaRetriever = new MediaMetadataRetriever();
metaRetriever.setDataSource(this, resultIntent.getData());
long duration =
Long.parseLong(
metaRetriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_DURATION));
int numFrames =
Integer.parseInt(
metaRetriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_VIDEO_FRAME_COUNT));
long frameIntervalMs = duration / numFrames;
for (int i = 0; i < numFrames; ++i) {
MPImage image =
new BitmapImageBuilder(metaRetriever.getFrameAtIndex(i)).build();
ObjectDetectionResult detectionResult =
objectDetector.detectForVideo(image, frameIntervalMs * i);
// Currently only annotates the detection result on the first video frame and
// displays it to verify the correctness.
// TODO: Annotates the detection result on every frame, save the
// annotated frames as a video file, and play back the video afterwards.
if (i == 0) {
imageView.setData(image, detectionResult);
runOnUiThread(() -> imageView.update());
}
}
}
}
});
Button loadVideoButton = findViewById(R.id.button_load_video);
loadVideoButton.setOnClickListener(
v -> {
createObjectDetector(RunningMode.VIDEO);
updateLayout();
this.inputSource = InputSource.VIDEO;
// Reads a video from gallery.
Intent pickVideoIntent = new Intent(Intent.ACTION_PICK);
pickVideoIntent.setDataAndType(MediaStore.Video.Media.INTERNAL_CONTENT_URI, "video/*");
videoGetter.launch(pickVideoIntent);
});
}
private void createObjectDetector(RunningMode mode) {
if (objectDetector != null) {
objectDetector.close();
}
// Initializes a new MediaPipe ObjectDetector instance
ObjectDetectorOptions options =
ObjectDetectorOptions.builder()
.setBaseOptions(BaseOptions.builder().setModelAssetPath(MODEL_FILE).build())
.setScoreThreshold(0.5f)
.setMaxResults(5)
.setRunningMode(mode)
.build();
objectDetector = ObjectDetector.createFromOptions(this, options);
}
private void updateLayout() {
// Updates the preview layout.
FrameLayout frameLayout = findViewById(R.id.preview_display_layout);
frameLayout.removeAllViewsInLayout();
imageView.setImageDrawable(null);
frameLayout.addView(imageView);
imageView.setVisibility(View.VISIBLE);
}
private Bitmap downscaleBitmap(Bitmap originalBitmap) {
double aspectRatio = (double) originalBitmap.getWidth() / originalBitmap.getHeight();
int width = imageView.getWidth();
int height = imageView.getHeight();
if (((double) imageView.getWidth() / imageView.getHeight()) > aspectRatio) {
width = (int) (height * aspectRatio);
} else {
height = (int) (width / aspectRatio);
}
return Bitmap.createScaledBitmap(originalBitmap, width, height, false);
}
private int getImageRotation(InputStream imageData) throws IOException, MediaPipeException {
int orientation =
new ExifInterface(imageData)
.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
switch (orientation) {
case ExifInterface.ORIENTATION_NORMAL:
return 0;
case ExifInterface.ORIENTATION_ROTATE_90:
return 90;
case ExifInterface.ORIENTATION_ROTATE_180:
return 180;
case ExifInterface.ORIENTATION_ROTATE_270:
return 270;
default:
// TODO: use getRotationDegrees() and isFlipped() instead of switch once flip
// is supported.
throw new MediaPipeException(
MediaPipeException.StatusCode.UNIMPLEMENTED.ordinal(),
"Flipped images are not supported yet.");
}
}
}

View File

@@ -1,77 +0,0 @@
// Copyright 2022 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.tasks.examples.objectdetector;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import androidx.appcompat.widget.AppCompatImageView;
import com.google.mediapipe.framework.image.BitmapExtractor;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.components.containers.Detection;
import com.google.mediapipe.tasks.vision.objectdetector.ObjectDetectionResult;
/** An ImageView implementation for displaying {@link ObjectDetectionResult}. */
public class ObjectDetectionResultImageView extends AppCompatImageView {
private static final String TAG = "ObjectDetectionResultImageView";
private static final int BBOX_COLOR = Color.GREEN;
private static final int BBOX_THICKNESS = 5; // Pixels
private Bitmap latest;
public ObjectDetectionResultImageView(Context context) {
super(context);
setScaleType(AppCompatImageView.ScaleType.FIT_CENTER);
}
/**
* Sets a {@link MPImage} and an {@link ObjectDetectionResult} to render.
*
* @param image a {@link MPImage} object for annotation.
* @param result an {@link ObjectDetectionResult} object that contains the detection result.
*/
public void setData(MPImage image, ObjectDetectionResult result) {
if (image == null || result == null) {
return;
}
latest = BitmapExtractor.extract(image);
Canvas canvas = new Canvas(latest);
canvas.drawBitmap(latest, new Matrix(), null);
for (int i = 0; i < result.detections().size(); ++i) {
drawDetectionOnCanvas(result.detections().get(i), canvas);
}
}
/** Updates the image view with the latest {@link ObjectDetectionResult}. */
public void update() {
postInvalidate();
if (latest != null) {
setImageBitmap(latest);
}
}
private void drawDetectionOnCanvas(Detection detection, Canvas canvas) {
// TODO: Draws the category and the score per bounding box.
// Draws bounding box.
Paint bboxPaint = new Paint();
bboxPaint.setColor(BBOX_COLOR);
bboxPaint.setStyle(Paint.Style.STROKE);
bboxPaint.setStrokeWidth(BBOX_THICKNESS);
canvas.drawRect(detection.boundingBox(), bboxPaint);
}
}

View File

@@ -1,34 +0,0 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeColor="#00000000"
android:strokeWidth="1">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeColor="#00000000"
android:strokeWidth="1" />
</vector>

View File

@@ -1,74 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<vector
android:height="108dp"
android:width="108dp"
android:viewportHeight="108"
android:viewportWidth="108"
xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z"/>
<path android:fillColor="#00000000" android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
<path android:fillColor="#00000000" android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF" android:strokeWidth="0.8"/>
</vector>

View File

@@ -1,40 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical">
<LinearLayout
android:id="@+id/buttons"
android:layout_width="match_parent"
android:layout_height="wrap_content"
style="?android:attr/buttonBarStyle" android:gravity="center"
android:orientation="horizontal">
<Button
android:id="@+id/button_load_picture"
android:layout_width="wrap_content"
style="?android:attr/buttonBarButtonStyle" android:layout_height="wrap_content"
android:text="@string/load_picture" />
<Button
android:id="@+id/button_load_video"
android:layout_width="wrap_content"
style="?android:attr/buttonBarButtonStyle" android:layout_height="wrap_content"
android:text="@string/load_video" />
<Button
android:id="@+id/button_start_camera"
android:layout_width="wrap_content"
style="?android:attr/buttonBarButtonStyle" android:layout_height="wrap_content"
android:text="@string/start_camera" />
</LinearLayout>
<FrameLayout
android:id="@+id/preview_display_layout"
android:layout_width="match_parent"
android:layout_height="match_parent">
<TextView
android:id="@+id/no_view"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:text="@string/instruction" />
</FrameLayout>
</LinearLayout>

View File

@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>

View File

@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>

15 binary files deleted (previews not shown). Sizes before deletion: 1.3 KiB, 2.2 KiB, 3.2 KiB, 959 B, 900 B, 1.9 KiB, 1.9 KiB, 1.8 KiB, 4.5 KiB, 3.5 KiB, 5.5 KiB, 7.6 KiB, 4.9 KiB, 8.1 KiB, and 11 KiB.

View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#008577</color>
<color name="colorPrimaryDark">#00574B</color>
<color name="colorAccent">#D81B60</color>
</resources>

View File

@@ -1,6 +0,0 @@
<resources>
<string name="load_picture" translatable="false">Load Picture</string>
<string name="load_video" translatable="false">Load Video</string>
<string name="start_camera" translatable="false">Start Camera</string>
<string name="instruction" translatable="false">Please press any button above to start</string>
</resources>

View File

@@ -1,11 +0,0 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>

View File

@@ -13,19 +13,21 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
+#include "mediapipe/tasks/ios/test/vision/utils/sources/parse_proto_utils.h"

 #include <fstream>
 #include <sstream>

+#include "absl/status/status.h"
+#include "google/protobuf/text_format.h"

 namespace mediapipe::tasks::ios::test::vision::utils {
 namespace {
+using ::google::protobuf::Message;
 using ::google::protobuf::TextFormat;
 }  // anonymous namespace

-absl::Status get_proto_from_pbtxt(const std::string file_path,
-                                  google::protobuf::Message& proto) {
+::absl::Status get_proto_from_pbtxt(const std::string file_path,
+                                    Message& proto) {
   std::ifstream file_input_stream(file_path);
   if (!file_input_stream.is_open())
     return absl::InvalidArgumentError("Cannot read input file.");

@@ -34,8 +36,8 @@ absl::Status get_proto_from_pbtxt(const std::string file_path,
   strings_stream << file_input_stream.rdbuf();

   return TextFormat::ParseFromString(strings_stream.str(), &proto)
-             ? absl::OkStatus()
-             : absl::InvalidArgumentError(
+             ? ::absl::OkStatus()
+             : ::absl::InvalidArgumentError(
                    "Cannot read a valid proto from the input file.");
 }
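A short usage sketch (not part of the commit) of the updated helper; the file path and the concrete proto type are placeholders.

#include "absl/status/status.h"
#include "mediapipe/framework/calculator.pb.h"
#include "mediapipe/tasks/ios/test/vision/utils/sources/parse_proto_utils.h"

// Parses a text-format CalculatorGraphConfig from disk. Any proto type works,
// since the helper takes the message by google::protobuf::Message reference.
absl::Status LoadGraphConfig(mediapipe::CalculatorGraphConfig& config) {
  return mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt(
      "/path/to/graph_config.pbtxt", config);
}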

View File

@@ -19,11 +19,11 @@ limitations under the License.
 #include <string>

 #include "absl/status/status.h"
-#include "google/protobuf/text_format.h"
+#include "google/protobuf/message.h"

 namespace mediapipe::tasks::ios::test::vision::utils {
-absl::Status get_proto_from_pbtxt(const std::string file_path,
-                                  google::protobuf::Message& proto);
+absl::Status get_proto_from_pbtxt(std::string file_path,
+                                  ::google::protobuf::Message& proto);
 }  // namespace mediapipe::tasks::ios::test::vision::utils

 #endif  // MEDIAPIPE_TASKS_IOS_TEST_VISION_UTILS_H_

View File

@ -1,4 +1,4 @@
# Copyright 2022 The MediaPipe Authors. # Copyright 2023 The MediaPipe Authors.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -12,10 +12,16 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
package(default_visibility = ["//mediapipe/tasks:internal"])
licenses(["notice"]) licenses(["notice"])
filegroup( objc_library(
name = "resource_files", name = "MPPFaceLandmarkerOptions",
srcs = glob(["res/**"]), srcs = ["sources/MPPFaceLandmarkerOptions.m"],
visibility = ["//mediapipe/tasks/examples/android:__subpackages__"], hdrs = ["sources/MPPFaceLandmarkerOptions.h"],
deps = [
"//mediapipe/tasks/ios/core:MPPTaskOptions",
"//mediapipe/tasks/ios/vision/core:MPPRunningMode",
],
) )

View File

@@ -0,0 +1,64 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "mediapipe/tasks/ios/core/sources/MPPTaskOptions.h"
#import "mediapipe/tasks/ios/vision/core/sources/MPPRunningMode.h"
NS_ASSUME_NONNULL_BEGIN
/** Options for setting up a `MPPFaceLandmarker`. */
NS_SWIFT_NAME(FaceLandmarkerOptions)
@interface MPPFaceLandmarkerOptions : MPPTaskOptions <NSCopying>
/**
* Running mode of the face landmark detection task. Defaults to `MPPRunningModeImage`.
* `MPPFaceLandmarker` can be created with one of the following running modes:
* 1. `MPPRunningModeImage`: The mode for performing face detection on single image inputs.
* 2. `MPPRunningModeVideo`: The mode for performing face detection on the decoded frames of a
* video.
* 3. `MPPRunningModeLiveStream`: The mode for performing face detection on a live stream of
* input data, such as from the camera.
*/
@property(nonatomic) MPPRunningMode runningMode;
/** The maximum number of faces that can be detected by the FaceLandmarker. Defaults to 1. */
@property(nonatomic) NSInteger numFaces;
/**
* The minimum confidence score for the face detection to be considered successful. Defaults to 0.5.
*/
@property(nonatomic) float minFaceDetectionConfidence;
/**
* The minimum confidence score of face presence in the face landmark detection. Defaults to
* 0.5.
*/
@property(nonatomic) float minFacePresenceConfidence;
/**
* The minimum confidence score for the face tracking to be considered successful. Defaults to 0.5.
*/
@property(nonatomic) float minTrackingConfidence;
/**
* Whether FaceLandmarker outputs face blendshapes classification. Face blendshapes are used for
* rendering the 3D face model.
*/
@property(nonatomic) BOOL outputFaceBlendshapes;
@end
NS_ASSUME_NONNULL_END

View File

@@ -0,0 +1,43 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h"
@implementation MPPFaceLandmarkerOptions
- (instancetype)init {
self = [super init];
if (self) {
_numFaces = 1;
_minFaceDetectionConfidence = 0.5f;
_minFacePresenceConfidence = 0.5f;
_minTrackingConfidence = 0.5f;
_outputFaceBlendshapes = NO;
}
return self;
}
- (id)copyWithZone:(NSZone *)zone {
MPPFaceLandmarkerOptions *faceLandmarkerOptions = [super copyWithZone:zone];
faceLandmarkerOptions.numFaces = self.numFaces;
faceLandmarkerOptions.minFaceDetectionConfidence = self.minFaceDetectionConfidence;
faceLandmarkerOptions.minFacePresenceConfidence = self.minFacePresenceConfidence;
faceLandmarkerOptions.minTrackingConfidence = self.minTrackingConfidence;
faceLandmarkerOptions.outputFaceBlendshapes = self.outputFaceBlendshapes;
return faceLandmarkerOptions;
}
@end

View File

@@ -0,0 +1,33 @@
# Copyright 2023 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package(default_visibility = ["//mediapipe/tasks:internal"])
licenses(["notice"])
objc_library(
name = "MPPFaceLandmarkerOptionsHelpers",
srcs = ["sources/MPPFaceLandmarkerOptions+Helpers.mm"],
hdrs = ["sources/MPPFaceLandmarkerOptions+Helpers.h"],
deps = [
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/tasks/cc/vision/face_detector/proto:face_detector_graph_options_cc_proto",
"//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarker_graph_options_cc_proto",
"//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarks_detector_graph_options_cc_proto",
"//mediapipe/tasks/ios/common/utils:NSStringHelpers",
"//mediapipe/tasks/ios/core:MPPTaskOptionsProtocol",
"//mediapipe/tasks/ios/core/utils:MPPBaseOptionsHelpers",
"//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarkerOptions",
],
)

View File

@@ -0,0 +1,36 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef __cplusplus
#error "This file requires Objective-C++."
#endif // __cplusplus
#include "mediapipe/framework/calculator_options.pb.h"
#import "mediapipe/tasks/ios/core/sources/MPPTaskOptionsProtocol.h"
#import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h"
NS_ASSUME_NONNULL_BEGIN
@interface MPPFaceLandmarkerOptions (Helpers) <MPPTaskOptionsProtocol>
/**
* Populates the provided `CalculatorOptions` proto container with the current settings.
*
* @param optionsProto The `CalculatorOptions` proto object to copy the settings to.
*/
- (void)copyToProto:(::mediapipe::CalculatorOptions *)optionsProto;
@end
NS_ASSUME_NONNULL_END

View File

@@ -0,0 +1,53 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerOptions+Helpers.h"
#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
#import "mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.h"
#include "mediapipe/tasks/cc/vision/face_detector/proto/face_detector_graph_options.pb.h"
#include "mediapipe/tasks/cc/vision/face_landmarker/proto/face_landmarker_graph_options.pb.h"
#include "mediapipe/tasks/cc/vision/face_landmarker/proto/face_landmarks_detector_graph_options.pb.h"
using CalculatorOptionsProto = ::mediapipe::CalculatorOptions;
using FaceDetectorGraphOptionsProto =
::mediapipe::tasks::vision::face_detector::proto::FaceDetectorGraphOptions;
using FaceLandmarkerGraphOptionsProto =
::mediapipe::tasks::vision::face_landmarker::proto::FaceLandmarkerGraphOptions;
using FaceLandmarksDetectorGraphOptionsProto =
::mediapipe::tasks::vision::face_landmarker::proto::FaceLandmarksDetectorGraphOptions;
@implementation MPPFaceLandmarkerOptions (Helpers)
- (void)copyToProto:(CalculatorOptionsProto *)optionsProto {
FaceLandmarkerGraphOptionsProto *faceLandmarkerGraphOptions =
optionsProto->MutableExtension(FaceLandmarkerGraphOptionsProto::ext);
faceLandmarkerGraphOptions->Clear();
[self.baseOptions copyToProto:faceLandmarkerGraphOptions->mutable_base_options()];
faceLandmarkerGraphOptions->set_min_tracking_confidence(self.minTrackingConfidence);
FaceLandmarksDetectorGraphOptionsProto *faceLandmarkerDetectorGraphOptions =
faceLandmarkerGraphOptions->mutable_face_landmarks_detector_graph_options();
faceLandmarkerDetectorGraphOptions->set_min_detection_confidence(self.minFacePresenceConfidence);
FaceDetectorGraphOptionsProto *faceDetectorGraphOptions =
faceLandmarkerGraphOptions->mutable_face_detector_graph_options();
faceDetectorGraphOptions->set_num_faces(self.numFaces);
faceDetectorGraphOptions->set_min_detection_confidence(self.minFaceDetectionConfidence);
}
@end

View File

@@ -57,6 +57,7 @@ py_library(
     deps = [
         ":metadata_info",
         ":metadata_writer",
+        ":writer_utils",
         "//mediapipe/tasks/metadata:image_segmenter_metadata_schema_py",
         "//mediapipe/tasks/metadata:metadata_schema_py",
         "//mediapipe/tasks/python/metadata",

View File

@@ -22,6 +22,7 @@ from mediapipe.tasks.metadata import metadata_schema_py_generated as _metadata_f
 from mediapipe.tasks.python.metadata import metadata
 from mediapipe.tasks.python.metadata.metadata_writers import metadata_info
 from mediapipe.tasks.python.metadata.metadata_writers import metadata_writer
+from mediapipe.tasks.python.metadata.metadata_writers import writer_utils

 _MODEL_NAME = "ImageSegmenter"

@@ -148,10 +149,17 @@ class MetadataWriter(metadata_writer.MetadataWriterBase):
     writer = metadata_writer.MetadataWriter(model_buffer)
     writer.add_general_info(_MODEL_NAME, _MODEL_DESCRIPTION)
     writer.add_image_input(input_norm_mean, input_norm_std)
-    writer.add_segmentation_output(labels=labels)
     if activation is not None:
       option_md = ImageSegmenterOptionsMd(activation)
       writer.add_custom_metadata(option_md)
+    num_output_tensors = writer_utils.get_subgraph(model_buffer).OutputsLength()
+    if num_output_tensors == 2:
+      # For image segmenter model with 2 output tensors, the first one is
+      # quality score, and the second one is matting mask.
+      writer.add_feature_output(
+          "quality score", "The quality score of matting result."
+      )
+    writer.add_segmentation_output(labels=labels)
     return cls(writer)

   def populate(self) -> tuple[bytearray, str]:

View File

@@ -717,8 +717,8 @@ export class GraphRunner {
   * given timestamp, to be parsed into the specified protobuffer type.
   * @param data The binary (serialized) raw protobuffer data.
   * @param protoType The C++ namespaced type this protobuffer data corresponds
-  *     to. It will be converted to this type when output as a packet into the
-  *     graph.
+  *     to (e.g. "foo.Bar"). It will be converted to this type when output as a
+  *     packet into the graph.
   * @param streamName The name of the graph input stream to send data into.
   * @param timestamp The timestamp of the input data, in ms.
   */