diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/Hand Tracking GPU.iml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/Hand Tracking GPU.iml new file mode 100644 index 000000000..350ccf071 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/Hand Tracking GPU.iml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/app.iml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/app.iml new file mode 100644 index 000000000..3626cdf92 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/app.iml @@ -0,0 +1,157 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/build.gradle b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/build.gradle new file mode 100644 index 000000000..72467514a --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/build.gradle @@ -0,0 +1,53 @@ +apply plugin: 'com.android.application' + +android { + compileSdkVersion 29 + buildToolsVersion "29.0.2" + defaultConfig { + applicationId 
"com.example.handtrackinggpu" + minSdkVersion 21 + targetSdkVersion 29 + versionCode 1 + versionName "1.0" + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } +} + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + implementation 'androidx.appcompat:appcompat:1.0.2' + implementation 'androidx.constraintlayout:constraintlayout:1.1.3' + testImplementation 'junit:junit:4.12' + androidTestImplementation 'androidx.test.ext:junit:1.1.0' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1' + + implementation "com.google.guava:guava:28.1-jre" + implementation "com.google.flogger:flogger:0.4" + implementation 'com.google.protobuf:protobuf-java:3.5.1' + + compileOnly 'org.glassfish:javax.annotation:10.0-b28' + implementation 'androidx.annotation:annotation:1.1.0' + implementation 'org.jetbrains:annotations:15.0' + +// implementation 'com.intellij:annotations:+@jar' + + implementation "androidx.camera:camera-core:1.0.0-alpha06" + // If you want to use Camera2 extensions + implementation "androidx.camera:camera-camera2:1.0.0-alpha06" + // If you want to use the Camera View class + implementation "androidx.camera:camera-view:1.0.0-alpha03" + // If you want to use Camera Extensions + implementation "androidx.camera:camera-extensions:1.0.0-alpha03" + +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/proguard-rules.pro b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/proguard-rules.pro new file mode 100644 index 000000000..f1b424510 --- /dev/null +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. +#-renamesourcefileattribute SourceFile diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/AndroidManifest.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/AndroidManifest.xml new file mode 100644 index 000000000..7e5e3ad05 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/AndroidManifest.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/example/handtrackinggpu/MainActivity.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/example/handtrackinggpu/MainActivity.java new file mode 100644 index 000000000..ff9e9c65d --- /dev/null +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/example/handtrackinggpu/MainActivity.java @@ -0,0 +1,173 @@ +package com.example.handtrackinggpu; + +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + + +import android.graphics.SurfaceTexture; +import android.os.Build; +import android.os.Bundle; +import android.util.Size; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.View; +import android.view.ViewGroup; + +import androidx.annotation.RequiresApi; +import androidx.appcompat.app.AppCompatActivity; + +import com.google.mediapipe.components.CameraHelper; +import com.google.mediapipe.components.CameraXPreviewHelper; +import com.google.mediapipe.components.ExternalTextureConverter; +import com.google.mediapipe.components.FrameProcessor; +import com.google.mediapipe.components.PermissionHelper; +import com.google.mediapipe.framework.AndroidAssetUtil; +import com.google.mediapipe.glutil.EglManager; + +/** Main activity of MediaPipe example apps. 
*/ +public class MainActivity extends AppCompatActivity { + private static final String TAG = "MainActivity"; + + private static final String BINARY_GRAPH_NAME = "handtrackinggpu.binarypb"; + private static final String INPUT_VIDEO_STREAM_NAME = "input_video"; + private static final String OUTPUT_VIDEO_STREAM_NAME = "output_video"; + private static final CameraHelper.CameraFacing CAMERA_FACING = CameraHelper.CameraFacing.FRONT; + + // Flips the camera-preview frames vertically before sending them into FrameProcessor to be + // processed in a MediaPipe graph, and flips the processed frames back when they are displayed. + // This is needed because OpenGL represents images assuming the image origin is at the bottom-left + // corner, whereas MediaPipe in general assumes the image origin is at top-left. + private static final boolean FLIP_FRAMES_VERTICALLY = true; + + static { + // Load all native libraries needed by the app. + System.loadLibrary("mediapipe_jni"); + System.loadLibrary("opencv_java4"); + } + + // {@link SurfaceTexture} where the camera-preview frames can be accessed. + private SurfaceTexture previewFrameTexture; + // {@link SurfaceView} that displays the camera-preview frames processed by a MediaPipe graph. + private SurfaceView previewDisplayView; + + // Creates and manages an {@link EGLContext}. + private EglManager eglManager; + // Sends camera-preview frames into a MediaPipe graph for processing, and displays the processed + // frames onto a {@link Surface}. + private FrameProcessor processor; + // Converts the GL_TEXTURE_EXTERNAL_OES texture from Android camera into a regular texture to be + // consumed by {@link FrameProcessor} and the underlying MediaPipe graph. + private ExternalTextureConverter converter; + + // Handles camera access via the {@link CameraX} Jetpack support library. 
+ private CameraXPreviewHelper cameraHelper; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_main); + + previewDisplayView = new SurfaceView(this); + setupPreviewDisplayView(); + + // Initialize asset manager so that MediaPipe native libraries can access the app assets, e.g., + // binary graphs. + AndroidAssetUtil.initializeNativeAssetManager(this); + + eglManager = new EglManager(null); + processor = + new FrameProcessor( + this, + eglManager.getNativeContext(), + BINARY_GRAPH_NAME, + INPUT_VIDEO_STREAM_NAME, + OUTPUT_VIDEO_STREAM_NAME); + processor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY); + + PermissionHelper.checkAndRequestCameraPermissions(this); + } + + @Override + protected void onResume() { + super.onResume(); + converter = new ExternalTextureConverter(eglManager.getContext()); + converter.setFlipY(FLIP_FRAMES_VERTICALLY); + converter.setConsumer(processor); + if (PermissionHelper.cameraPermissionsGranted(this)) { + startCamera(); + } + } + + @Override + protected void onPause() { + super.onPause(); + converter.close(); + } + + @Override + public void onRequestPermissionsResult( + int requestCode, String[] permissions, int[] grantResults) { + super.onRequestPermissionsResult(requestCode, permissions, grantResults); + PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults); + } + + private void setupPreviewDisplayView() { + previewDisplayView.setVisibility(View.GONE); + ViewGroup viewGroup = findViewById(R.id.preview_display_layout); + viewGroup.addView(previewDisplayView); + + previewDisplayView + .getHolder() + .addCallback( + new SurfaceHolder.Callback() { + @Override + public void surfaceCreated(SurfaceHolder holder) { + processor.getVideoSurfaceOutput().setSurface(holder.getSurface()); + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + @Override + public void surfaceChanged(SurfaceHolder holder, int format, 
int width, int height) { + // (Re-)Compute the ideal size of the camera-preview display (the area that the + // camera-preview frames get rendered onto, potentially with scaling and rotation) + // based on the size of the SurfaceView that contains the display. + Size viewSize = new Size(width, height); + Size displaySize = cameraHelper.computeDisplaySizeFromViewSize(viewSize); + + // Connect the converter to the camera-preview frames as its input (via + // previewFrameTexture), and configure the output width and height as the computed + // display size. + converter.setSurfaceTextureAndAttachToGLContext( + previewFrameTexture, displaySize.getWidth(), displaySize.getHeight()); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + processor.getVideoSurfaceOutput().setSurface(null); + } + }); + } + + private void startCamera() { + cameraHelper = new CameraXPreviewHelper(); + cameraHelper.setOnCameraStartedListener( + surfaceTexture -> { + previewFrameTexture = surfaceTexture; + // Make the display view visible to start showing the preview. This triggers the + // SurfaceHolder.Callback added to (the holder of) previewDisplayView. + previewDisplayView.setVisibility(View.VISIBLE); + }); + cameraHelper.startCamera(this, CAMERA_FACING, /*surfaceTexture=*/ null); + } +} \ No newline at end of file diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/BUILD new file mode 100644 index 000000000..80b65e3d4 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/BUILD @@ -0,0 +1,70 @@ +# Copyright 2019 The MediaPipe Authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +licenses(["notice"]) # Apache 2.0 + +android_library( + name = "android_components", + srcs = glob( + ["*.java"], + exclude = [ + "CameraHelper.java", + "CameraXPreviewHelper.java", + ], + ), + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//mediapipe/java/com/google/mediapipe/glutil", + "//third_party:androidx_appcompat", + "//third_party:androidx_core", + "//third_party:androidx_legacy_support_v4", + "//third_party:androidx_recyclerview", + "@com_google_code_findbugs//jar", + "@com_google_guava_android//jar", + ], +) + +# Note: We need to separate the camera helper files in a different BUILD target because CameraX has a minimum Android API +# requirement of API 21. Users of android_components may have different API dependencies. 
+android_library( + name = "android_camerax_helper", + srcs = [ + "CameraHelper.java", + "CameraXPreviewHelper.java", + ], + visibility = ["//visibility:public"], + deps = [ + "//third_party:androidx_appcompat", + "//third_party:androidx_legacy_support_v4", + "//third_party:camera2", + "//third_party:camerax_core", + "@androidx_concurrent_futures//jar", + "@androidx_lifecycle//jar", + "@com_google_code_findbugs//jar", + "@com_google_guava_android//jar", + ], +) + +android_library( + name = "android_microphone_helper", + srcs = [ + "MicrophoneHelper.java", + ], + visibility = ["//visibility:public"], + deps = [ + "@com_google_code_findbugs//jar", + "@com_google_guava_android//jar", + ], +) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraHelper.java new file mode 100644 index 000000000..980ad8754 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraHelper.java @@ -0,0 +1,63 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.mediapipe.components; + +import android.app.Activity; +import android.graphics.SurfaceTexture; +import android.util.Size; +import javax.annotation.Nullable; + +/** Abstract interface for a helper class that manages camera access. */ +public abstract class CameraHelper { + /** The listener is called when camera start is complete. */ + public interface OnCameraStartedListener { + /** + * Called when camera start is complete and the camera-preview frames can be accessed from the + * surfaceTexture. The surfaceTexture can be null if it is not prepared by the CameraHelper. + */ + public void onCameraStarted(@Nullable SurfaceTexture surfaceTexture); + } + + protected static final String TAG = "CameraHelper"; + + /** Represents the direction the camera faces relative to device screen. */ + public static enum CameraFacing { + FRONT, + BACK + }; + + protected OnCameraStartedListener onCameraStartedListener; + + protected CameraFacing cameraFacing; + + /** + * Initializes the camera and sets it up for accessing frames from a custom SurfaceTexture object. + * The SurfaceTexture object can be null when it is the CameraHelper that prepares a + * SurfaceTexture object for grabbing frames. + */ + public abstract void startCamera( + Activity context, CameraFacing cameraFacing, @Nullable SurfaceTexture surfaceTexture); + + /** + * Computes the ideal size of the camera-preview display (the area that the camera-preview frames + * get rendered onto, potentially with scaling and rotation) based on the size of the view + * containing the display. Returns the computed display size. 
+ */ + public abstract Size computeDisplaySizeFromViewSize(Size viewSize); + + public void setOnCameraStartedListener(@Nullable OnCameraStartedListener listener) { + onCameraStartedListener = listener; + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraXPreviewHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraXPreviewHelper.java new file mode 100644 index 000000000..10bf3d1fc --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraXPreviewHelper.java @@ -0,0 +1,102 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.components; + +import android.app.Activity; +import androidx.lifecycle.LifecycleOwner; +import android.graphics.SurfaceTexture; +import android.util.Log; +import android.util.Size; +import androidx.camera.core.CameraX; +import androidx.camera.core.CameraX.LensFacing; +import androidx.camera.core.Preview; +import androidx.camera.core.PreviewConfig; + +/** + * Uses CameraX APIs for camera setup and access. + * + *

{@link CameraX} connects to the camera and provides video frames. + */ +public class CameraXPreviewHelper extends CameraHelper { + private static final String TAG = "CameraXPreviewHelper"; + + private Preview preview; + + // Size of the camera-preview frames from the camera. + private Size frameSize; + // Rotation of the camera-preview frames in degrees. + private int frameRotation; + + @Override + @SuppressWarnings("RestrictTo") // See b/132705545. + public void startCamera( + Activity context, CameraFacing cameraFacing, SurfaceTexture surfaceTexture) { + LensFacing cameraLensFacing = + cameraFacing == CameraHelper.CameraFacing.FRONT ? LensFacing.FRONT : LensFacing.BACK; + PreviewConfig previewConfig = + new PreviewConfig.Builder().setLensFacing(cameraLensFacing).build(); + preview = new Preview(previewConfig); + + preview.setOnPreviewOutputUpdateListener( + previewOutput -> { + if (!previewOutput.getTextureSize().equals(frameSize)) { + frameSize = previewOutput.getTextureSize(); + frameRotation = previewOutput.getRotationDegrees(); + if (frameSize.getWidth() == 0 || frameSize.getHeight() == 0) { + // Invalid frame size. Wait for valid input dimensions before updating display size. + Log.d(TAG, "Invalid frameSize."); + return; + } + } + if (onCameraStartedListener != null) { + onCameraStartedListener.onCameraStarted(previewOutput.getSurfaceTexture()); + } + }); + CameraX.bindToLifecycle(/*lifecycleOwner=*/ (LifecycleOwner) context, preview); + } + + @Override + public Size computeDisplaySizeFromViewSize(Size viewSize) { + if (viewSize == null || frameSize == null) { + // Wait for all inputs before setting display size. + Log.d(TAG, "viewSize or frameSize is null."); + return null; + } + + // Valid rotation values are 0, 90, 180 and 270. + // Frames are rotated relative to the device's "natural" landscape orientation. When in portrait + // mode, valid rotation values are 90 or 270, and the width/height should be swapped to + // calculate aspect ratio. 
+ float frameAspectRatio = + frameRotation == 90 || frameRotation == 270 + ? frameSize.getHeight() / (float) frameSize.getWidth() + : frameSize.getWidth() / (float) frameSize.getHeight(); + + float viewAspectRatio = viewSize.getWidth() / (float) viewSize.getHeight(); + + // Match shortest sides together. + int scaledWidth; + int scaledHeight; + if (frameAspectRatio < viewAspectRatio) { + scaledWidth = viewSize.getWidth(); + scaledHeight = Math.round(viewSize.getWidth() / frameAspectRatio); + } else { + scaledHeight = viewSize.getHeight(); + scaledWidth = Math.round(viewSize.getHeight() * frameAspectRatio); + } + + return new Size(scaledWidth, scaledHeight); + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/ExternalTextureConverter.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/ExternalTextureConverter.java new file mode 100644 index 000000000..122f598ea --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/ExternalTextureConverter.java @@ -0,0 +1,373 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.mediapipe.components; + +import android.graphics.SurfaceTexture; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.util.Log; +import com.google.mediapipe.framework.AppTextureFrame; +import com.google.mediapipe.glutil.ExternalTextureRenderer; +import com.google.mediapipe.glutil.GlThread; +import com.google.mediapipe.glutil.ShaderUtil; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import javax.microedition.khronos.egl.EGLContext; + +/** + * Textures from {@link SurfaceTexture} are only supposed to be bound to target {@link + * GLES11Ext#GL_TEXTURE_EXTERNAL_OES}, which is accessed using samplerExternalOES in the shader. + * This means they cannot be used with a regular shader that expects a sampler2D. This class creates + * a copy of the texture that can be used with {@link GLES20#GL_TEXTURE_2D} and sampler2D. + */ +public class ExternalTextureConverter implements TextureFrameProducer { + private static final String TAG = "ExternalTextureConv"; // Max length of a tag is 23. + private static final int DEFAULT_NUM_BUFFERS = 2; // Number of output frames allocated. + private static final String THREAD_NAME = "ExternalTextureConverter"; + + private RenderThread thread; + + /** + * Creates the ExternalTextureConverter to create a working copy of each camera frame. + * + * @param numBuffers the number of camera frames that can enter processing simultaneously. + */ + public ExternalTextureConverter(EGLContext parentContext, int numBuffers) { + thread = new RenderThread(parentContext, numBuffers); + thread.setName(THREAD_NAME); + thread.start(); + try { + thread.waitUntilReady(); + } catch (InterruptedException ie) { + // Someone interrupted our thread. This is not supposed to happen: we own + // the thread, and we are not going to interrupt it. Therefore, it is not + // reasonable for this constructor to throw an InterruptedException + // (which is a checked exception). 
If it should somehow happen that the + // thread is interrupted, let's set the interrupted flag again, log the + // error, and throw a RuntimeException. + Thread.currentThread().interrupt(); + Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage()); + throw new RuntimeException(ie); + } + } + + /** + * Sets vertical flipping of the texture, useful for conversion between coordinate systems with + * top-left v.s. bottom-left origins. This should be called before {@link + * #setSurfaceTexture(SurfaceTexture, int, int)} or {@link + * #setSurfaceTextureAndAttachToGLContext(SurfaceTexture, int, int)}. + */ + public void setFlipY(boolean flip) { + thread.setFlipY(flip); + } + + public ExternalTextureConverter(EGLContext parentContext) { + this(parentContext, DEFAULT_NUM_BUFFERS); + } + + public ExternalTextureConverter( + EGLContext parentContext, SurfaceTexture texture, int targetWidth, int targetHeight) { + this(parentContext); + thread.setSurfaceTexture(texture, targetWidth, targetHeight); + } + + /** + * Sets the input surface texture. + * + *

The provided width and height will be the size of the converted texture, so if the input + * surface texture is rotated (as expressed by its transformation matrix) the provided width and + * height should be swapped. + */ + // TODO: Clean up setSurfaceTexture methods. + public void setSurfaceTexture(SurfaceTexture texture, int width, int height) { + if (texture != null && (width == 0 || height == 0)) { + throw new RuntimeException( + "ExternalTextureConverter: setSurfaceTexture dimensions cannot be zero"); + } + thread.getHandler().post(() -> thread.setSurfaceTexture(texture, width, height)); + } + + // TODO: Clean up setSurfaceTexture methods. + public void setSurfaceTextureAndAttachToGLContext(SurfaceTexture texture, int width, int height) { + if (texture != null && (width == 0 || height == 0)) { + throw new RuntimeException( + "ExternalTextureConverter: setSurfaceTexture dimensions cannot be zero"); + } + thread + .getHandler() + .post(() -> thread.setSurfaceTextureAndAttachToGLContext(texture, width, height)); + } + + @Override + public void setConsumer(TextureFrameConsumer next) { + thread.setConsumer(next); + } + + public void addConsumer(TextureFrameConsumer consumer) { + thread.addConsumer(consumer); + } + + public void removeConsumer(TextureFrameConsumer consumer) { + thread.removeConsumer(consumer); + } + + public void close() { + if (thread == null) { + return; + } + thread.getHandler().post(() -> thread.setSurfaceTexture(null, 0, 0)); + thread.quitSafely(); + try { + thread.join(); + } catch (InterruptedException ie) { + // Set the interrupted flag again, log the error, and throw a RuntimeException. + Thread.currentThread().interrupt(); + Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage()); + throw new RuntimeException(ie); + } + } + + private static class RenderThread extends GlThread + implements SurfaceTexture.OnFrameAvailableListener { + private static final long NANOS_PER_MICRO = 1000; // Nanoseconds in one microsecond. 
+ private volatile SurfaceTexture surfaceTexture = null; + private final List consumers; + private List outputFrames = null; + private int outputFrameIndex = -1; + private ExternalTextureRenderer renderer = null; + private long timestampOffset = 0; + private long previousTimestamp = 0; + + protected int destinationWidth = 0; + protected int destinationHeight = 0; + + public RenderThread(EGLContext parentContext, int numBuffers) { + super(parentContext); + outputFrames = new ArrayList<>(); + outputFrames.addAll(Collections.nCopies(numBuffers, null)); + renderer = new ExternalTextureRenderer(); + consumers = new ArrayList<>(); + } + + public void setFlipY(boolean flip) { + renderer.setFlipY(flip); + } + + public void setSurfaceTexture(SurfaceTexture texture, int width, int height) { + if (surfaceTexture != null) { + surfaceTexture.setOnFrameAvailableListener(null); + } + surfaceTexture = texture; + if (surfaceTexture != null) { + surfaceTexture.setOnFrameAvailableListener(this); + } + destinationWidth = width; + destinationHeight = height; + } + + public void setSurfaceTextureAndAttachToGLContext( + SurfaceTexture texture, int width, int height) { + setSurfaceTexture(texture, width, height); + int[] textures = new int[1]; + GLES20.glGenTextures(1, textures, 0); + surfaceTexture.attachToGLContext(textures[0]); + } + + public void setConsumer(TextureFrameConsumer consumer) { + synchronized (consumers) { + consumers.clear(); + consumers.add(consumer); + } + } + + public void addConsumer(TextureFrameConsumer consumer) { + synchronized (consumers) { + consumers.add(consumer); + } + } + + public void removeConsumer(TextureFrameConsumer consumer) { + synchronized (consumers) { + consumers.remove(consumer); + } + } + + @Override + public void onFrameAvailable(SurfaceTexture surfaceTexture) { + handler.post(() -> renderNext(surfaceTexture)); + } + + @Override + public void prepareGl() { + super.prepareGl(); + + GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + + 
renderer.setup(); + } + + @Override + public void releaseGl() { + for (int i = 0; i < outputFrames.size(); ++i) { + teardownDestination(i); + } + renderer.release(); + super.releaseGl(); // This releases the EGL context, so must do it after any GL calls. + } + + protected void renderNext(SurfaceTexture fromTexture) { + if (fromTexture != surfaceTexture) { + // Although the setSurfaceTexture and renderNext methods are correctly sequentialized on + // the same thread, the onFrameAvailable callback is not. Therefore, it is possible for + // onFrameAvailable to queue up a renderNext call while a setSurfaceTexture call is still + // pending on the handler. When that happens, we should simply disregard the call. + return; + } + try { + synchronized (consumers) { + boolean frameUpdated = false; + for (TextureFrameConsumer consumer : consumers) { + AppTextureFrame outputFrame = nextOutputFrame(); + // TODO: Switch to ref-counted single copy instead of making additional + // copies blitting to separate textures each time. + updateOutputFrame(outputFrame); + frameUpdated = true; + + if (consumer != null) { + if (Log.isLoggable(TAG, Log.VERBOSE)) { + Log.v( + TAG, + String.format( + "Locking tex: %d width: %d height: %d", + outputFrame.getTextureName(), + outputFrame.getWidth(), + outputFrame.getHeight())); + } + outputFrame.setInUse(); + consumer.onNewFrame(outputFrame); + } + } + if (!frameUpdated) { // Need to update the frame even if there are no consumers. + AppTextureFrame outputFrame = nextOutputFrame(); + // TODO: Switch to ref-counted single copy instead of making additional + // copies blitting to separate textures each time. 
+ updateOutputFrame(outputFrame); + } + } + } finally { + } + } + + private void teardownDestination(int index) { + if (outputFrames.get(index) != null) { + waitUntilReleased(outputFrames.get(index)); + GLES20.glDeleteTextures(1, new int[] {outputFrames.get(index).getTextureName()}, 0); + outputFrames.set(index, null); + } + } + + private void setupDestination(int index) { + teardownDestination(index); + int destinationTextureId = ShaderUtil.createRgbaTexture(destinationWidth, destinationHeight); + Log.d( + TAG, + String.format( + "Created output texture: %d width: %d height: %d", + destinationTextureId, destinationWidth, destinationHeight)); + bindFramebuffer(destinationTextureId, destinationWidth, destinationHeight); + outputFrames.set( + index, new AppTextureFrame(destinationTextureId, destinationWidth, destinationHeight)); + } + + + /** + * Gets next available frame or creates new one if next frame is not initialized + * or cannot be used with current surface texture. + * + *

+ * + * NOTE: must be invoked on GL thread + */ + private AppTextureFrame nextOutputFrame() { + outputFrameIndex = (outputFrameIndex + 1) % outputFrames.size(); + AppTextureFrame outputFrame = outputFrames.get(outputFrameIndex); + // Check if the size has changed. + if (outputFrame == null + || outputFrame.getWidth() != destinationWidth + || outputFrame.getHeight() != destinationHeight) { + // setupDestination will wait for the frame to be released before reallocating it. + setupDestination(outputFrameIndex); + outputFrame = outputFrames.get(outputFrameIndex); + } + waitUntilReleased(outputFrame); + return outputFrame; + } + + /** + * Updates output frame with current pixels of surface texture and corresponding timestamp. + * + * @param outputFrame {@link AppTextureFrame} to populate. + * + * NOTE: must be invoked on GL thread + */ + private void updateOutputFrame(AppTextureFrame outputFrame) { + // Copy surface texture's pixels to output frame + bindFramebuffer(outputFrame.getTextureName(), destinationWidth, destinationHeight); + renderer.render(surfaceTexture); + + // Populate frame timestamp with surface texture timestamp after render() as renderer + // ensures that surface texture has the up-to-date timestamp. (Also adjust |timestampOffset| + // to ensure that timestamps increase monotonically.) 
+ long textureTimestamp = surfaceTexture.getTimestamp() / NANOS_PER_MICRO; + if (textureTimestamp + timestampOffset <= previousTimestamp) { + timestampOffset = previousTimestamp + 1 - textureTimestamp; + } + outputFrame.setTimestamp(textureTimestamp + timestampOffset); + previousTimestamp = outputFrame.getTimestamp(); + } + + private void waitUntilReleased(AppTextureFrame frame) { + try { + if (Log.isLoggable(TAG, Log.VERBOSE)) { + Log.v( + TAG, + String.format( + "Waiting for tex: %d width: %d height: %d", + frame.getTextureName(), frame.getWidth(), frame.getHeight())); + } + frame.waitUntilReleased(); + if (Log.isLoggable(TAG, Log.VERBOSE)) { + Log.v( + TAG, + String.format( + "Finished waiting for tex: %d width: %d height: %d", + frame.getTextureName(), frame.getWidth(), frame.getHeight())); + } + } catch (InterruptedException ie) { + // Someone interrupted our thread. This is not supposed to happen: we own + // the thread, and we are not going to interrupt it. If it should somehow + // happen that the thread is interrupted, let's set the interrupted flag + // again, log the error, and throw a RuntimeException. + Thread.currentThread().interrupt(); + Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage()); + throw new RuntimeException(ie); + } + } + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/FrameProcessor.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/FrameProcessor.java new file mode 100644 index 000000000..c63f0495a --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/FrameProcessor.java @@ -0,0 +1,303 @@ +// Copyright 2019 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.components; + +import android.content.Context; +import android.graphics.Bitmap; +import android.util.Log; +import com.google.common.base.Preconditions; +import com.google.mediapipe.framework.AndroidAssetUtil; +import com.google.mediapipe.framework.AndroidPacketCreator; +import com.google.mediapipe.framework.Graph; +import com.google.mediapipe.framework.GraphService; +import com.google.mediapipe.framework.MediaPipeException; +import com.google.mediapipe.framework.Packet; +import com.google.mediapipe.framework.PacketCallback; +import com.google.mediapipe.framework.PacketGetter; +import com.google.mediapipe.framework.SurfaceOutput; +import com.google.mediapipe.framework.TextureFrame; +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import javax.annotation.Nullable; + +/** + * A {@link com.google.mediapipe.components.TextureFrameProcessor} that sends video frames through a + * MediaPipe graph. 
+ */ +public class FrameProcessor implements TextureFrameProcessor { + private static final String TAG = "FrameProcessor"; + + private List consumers = new ArrayList<>(); + private Graph mediapipeGraph; + private AndroidPacketCreator packetCreator; + private OnWillAddFrameListener addFrameListener; + private String videoInputStream; + private String videoInputStreamCpu; + private String videoOutputStream; + private SurfaceOutput videoSurfaceOutput; + private final AtomicBoolean started = new AtomicBoolean(false); + private boolean hybridPath = false; + + /** + * Constructor. + * + * @param context an Android {@link Context}. + * @param parentNativeContext a native handle to a GL context. The GL context(s) used by the + * calculators in the graph will join the parent context's sharegroup, so that textures + * generated by the calculators are available in the parent context, and vice versa. + * @param graphName the name of the file containing the binary representation of the graph. + * @param inputStream the graph input stream that will receive input video frames. + * @param outputStream the output stream from which output frames will be produced. 
+ */ + public FrameProcessor( + Context context, + long parentNativeContext, + String graphName, + String inputStream, + String outputStream) { + mediapipeGraph = new Graph(); + videoInputStream = inputStream; + videoOutputStream = outputStream; + + try { + if (new File(graphName).isAbsolute()) { + mediapipeGraph.loadBinaryGraph(graphName); + } else { + mediapipeGraph.loadBinaryGraph( + AndroidAssetUtil.getAssetBytes(context.getAssets(), graphName)); + } + + packetCreator = new AndroidPacketCreator(mediapipeGraph); + mediapipeGraph.addPacketCallback( + videoOutputStream, + new PacketCallback() { + @Override + public void process(Packet packet) { + List currentConsumers; + synchronized (this) { + currentConsumers = consumers; + } + for (TextureFrameConsumer consumer : currentConsumers) { + TextureFrame frame = PacketGetter.getTextureFrame(packet); + if (Log.isLoggable(TAG, Log.VERBOSE)) { + Log.v( + TAG, + String.format( + "Output tex: %d width: %d height: %d to consumer %h", + frame.getTextureName(), frame.getWidth(), frame.getHeight(), consumer)); + } + consumer.onNewFrame(frame); + } + } + }); + + mediapipeGraph.setParentGlContext(parentNativeContext); + } catch (MediaPipeException e) { + Log.e(TAG, "Mediapipe error: ", e); + } + + videoSurfaceOutput = mediapipeGraph.addSurfaceOutput(videoOutputStream); + } + + /** + * Interface to be used so that this class can receive a callback when onNewFrame has determined + * it will process an input frame. Can be used to feed packets to accessory streams. 
+ */ + public interface OnWillAddFrameListener { + void onWillAddFrame(long timestamp); + } + + public synchronized void setServiceObject(GraphService service, T object) { + mediapipeGraph.setServiceObject(service, object); + } + + public void setInputSidePackets(Map inputSidePackets) { + Preconditions.checkState( + !started.get(), "setInputSidePackets must be called before the graph is started"); + mediapipeGraph.setInputSidePackets(inputSidePackets); + } + + @Override + public void setConsumer(TextureFrameConsumer listener) { + synchronized (this) { + consumers = Arrays.asList(listener); + } + } + + public void setVideoInputStreamCpu(String inputStream) { + videoInputStreamCpu = inputStream; + } + + public void setHybridPath() { + hybridPath = true; + } + + public void addConsumer(TextureFrameConsumer listener) { + synchronized (this) { + List newConsumers = new ArrayList<>(consumers); + newConsumers.add(listener); + consumers = newConsumers; + } + } + + public boolean removeConsumer(TextureFrameConsumer listener) { + boolean existed; + synchronized (this) { + List newConsumers = new ArrayList<>(consumers); + existed = newConsumers.remove(listener); + consumers = newConsumers; + } + return existed; + } + + /** Gets the {@link Graph} used to run the graph. */ + public Graph getGraph() { + return mediapipeGraph; + } + + /** Gets the {@link PacketCreator} associated with the graph. */ + public AndroidPacketCreator getPacketCreator() { + return packetCreator; + } + + /** Gets the {@link SurfaceOutput} connected to the video output stream. */ + public SurfaceOutput getVideoSurfaceOutput() { + return videoSurfaceOutput; + } + + /** Closes and cleans up the graph. 
*/ + public void close() { + if (started.get()) { + try { + mediapipeGraph.closeAllPacketSources(); + mediapipeGraph.waitUntilGraphDone(); + } catch (MediaPipeException e) { + Log.e(TAG, "Mediapipe error: ", e); + } + try { + mediapipeGraph.tearDown(); + } catch (MediaPipeException e) { + Log.e(TAG, "Mediapipe error: ", e); + } + } + } + + /** + * Initializes the graph in advance of receiving frames. + * + *

Normally the graph is initialized when the first frame arrives. You can optionally call this + * method to initialize it ahead of time. + * @throws MediaPipeException for any error status. + */ + public void preheat() { + if (!started.getAndSet(true)) { + startGraph(); + } + } + + public void setOnWillAddFrameListener(@Nullable OnWillAddFrameListener addFrameListener) { + this.addFrameListener = addFrameListener; + } + + /** + * Returns true if the MediaPipe graph can accept one more input frame. + * @throws MediaPipeException for any error status. + */ + private boolean maybeAcceptNewFrame() { + if (!started.getAndSet(true)) { + startGraph(); + } + return true; + } + + @Override + public void onNewFrame(final TextureFrame frame) { + if (Log.isLoggable(TAG, Log.VERBOSE)) { + Log.v( + TAG, + String.format( + "Input tex: %d width: %d height: %d", + frame.getTextureName(), frame.getWidth(), frame.getHeight())); + } + + if (!maybeAcceptNewFrame()) { + frame.release(); + return; + } + + if (addFrameListener != null) { + addFrameListener.onWillAddFrame(frame.getTimestamp()); + } + + Packet imagePacket = packetCreator.createGpuBuffer(frame); + + try { + // addConsumablePacketToInputStream allows the graph to take exclusive ownership of the + // packet, which may allow for more memory optimizations. + mediapipeGraph.addConsumablePacketToInputStream( + videoInputStream, imagePacket, frame.getTimestamp()); + } catch (MediaPipeException e) { + Log.e(TAG, "Mediapipe error: ", e); + } + imagePacket.release(); + } + + /** + * Accepts a Bitmap to be sent to main input stream at the given timestamp. + * + *

Note: This requires a graph that takes an ImageFrame instead of a mediapipe::GpuBuffer. An + * instance of FrameProcessor should only ever use this or the other variant for onNewFrame(). + */ + public void onNewFrame(final Bitmap bitmap, long timestamp) { + if (!maybeAcceptNewFrame()) { + return; + } + + if (!hybridPath && addFrameListener != null) { + addFrameListener.onWillAddFrame(timestamp); + } + + Packet packet = getPacketCreator().createRgbImageFrame(bitmap); + + try { + // addConsumablePacketToInputStream allows the graph to take exclusive ownership of the + // packet, which may allow for more memory optimizations. + mediapipeGraph.addConsumablePacketToInputStream(videoInputStreamCpu, packet, timestamp); + } catch (MediaPipeException e) { + Log.e(TAG, "Mediapipe error: ", e); + } + packet.release(); + } + + public void waitUntilIdle() { + try { + mediapipeGraph.waitUntilGraphIdle(); + } catch (MediaPipeException e) { + Log.e(TAG, "Mediapipe error: ", e); + } + } + + /** + * Starts running the MediaPipe graph. + * @throws MediaPipeException for any error status. + */ + private void startGraph() { + mediapipeGraph.startRunningGraph(); + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/MicrophoneHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/MicrophoneHelper.java new file mode 100644 index 000000000..9a4764db2 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/MicrophoneHelper.java @@ -0,0 +1,295 @@ +// Copyright 2019 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.components; + +import android.media.AudioFormat; +import android.media.AudioRecord; +import android.media.AudioTimestamp; +import android.media.MediaRecorder.AudioSource; +import android.os.Build.VERSION; +import android.os.Build.VERSION_CODES; +import android.util.Log; +import javax.annotation.Nullable; + +/** Provides access to audio data from a microphone. */ +public class MicrophoneHelper { + /** The listener is called when audio data from the microphone is available. */ + public interface OnAudioDataAvailableListener { + public void onAudioDataAvailable(byte[] audioData, long timestampMicros); + } + + private static final String TAG = "MicrophoneHelper"; + + private static final int AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT; + private static final int AUDIO_SOURCE = AudioSource.MIC; + + // A small constant valued multiplier for setting bufferSize. This is useful + // to reduce buffer overflows when a lot of data needs to be read at a high + // sample rate from the audio stream. Note that it is desirable to keep this + // multiplier small, because very large buffer sizes can slow down blocking + // calls to AudioRecord.read(...) when the sample rate is low for instance. + private static final int BUFFER_SIZE_MULTIPLIER = 2; + + // A small constant value to decide the number of seconds of audio data that + // will be read in a single AudioRecord.read(...) 
+ call when + // AudioRecord.minBufferSize(...) is unavailable. Smaller values for this + // constant favor faster blocking calls to AudioRecord.read(...). + private static final int MAX_READ_INTERVAL_SEC = 1; + + // This class uses AudioFormat.ENCODING_PCM_16BIT, i.e. 16 bits per single channel sample. + private static final int BYTES_PER_MONO_SAMPLE = 2; + + private static final long UNINITIALIZED_TIMESTAMP = -1; + private static final long NANOS_PER_MICROS = 1000; + private static final long MICROS_PER_SECOND = 1000000; + + // Number of audio samples recorded per second. + private final int sampleRateInHz; + // Channel configuration of audio source, one of AudioRecord.CHANNEL_IN_MONO or + // AudioRecord.CHANNEL_IN_STEREO. + private final int channelConfig; + // Data storage allocated to record audio samples in a single function call to AudioRecord.read(). + private final int bufferSize; + // Bytes used per sample, accounts for number of channels of audio source. Possible values are 2 + // bytes for a 1-channel sample and 4 bytes for a 2-channel sample. + private final int bytesPerSample; + + private byte[] audioData; + + // Timestamp provided by the AudioTimestamp object. + private AudioTimestamp audioTimestamp; + // Initial timestamp base. Can be set by the client so that all timestamps calculated using the + // number of samples read per AudioRecord.read() function call start from this timestamp. + private long initialTimestamp = UNINITIALIZED_TIMESTAMP; + // The total number of samples read from multiple calls to AudioRecord.read(). This is reset to + // zero for every startMicrophone() call. + private long totalNumSamplesRead; + + // AudioRecord is used to set up a way to record data from the audio source. See + // https://developer.android.com/reference/android/media/AudioRecord.html for details. + private AudioRecord audioRecord; + // Data is read on a separate non-blocking thread. 
+ private Thread recordingThread; + + // This flag determines if audio will be read from the audio source and if the data read will be + // sent to the listener of this class. + private boolean recording = false; + + // This listener is provided with the data read on every AudioRecord.read() call. If the listener + // called stopRecording() while a call to AudioRecord.read() was blocked, the class will discard + // the data read after recording stopped. + private OnAudioDataAvailableListener onAudioDataAvailableListener; + + /** + * MicrophoneHelper class constructor. Arguments: + * + * @param sampleRateInHz Number of samples per second to be read from audio stream. + * @param channelConfig Configuration of audio channels. See + * https://developer.android.com/reference/android/media/AudioRecord.html#public-constructors_1. + */ + public MicrophoneHelper(int sampleRateInHz, int channelConfig) { + this.sampleRateInHz = sampleRateInHz; + this.channelConfig = channelConfig; + + // Number of channels of audio source, depending on channelConfig. + final int channelCount = channelConfig == AudioFormat.CHANNEL_IN_STEREO ? 2 : 1; + + bytesPerSample = BYTES_PER_MONO_SAMPLE * channelCount; + + // The minimum buffer size required by AudioRecord. + final int minBufferSize = + AudioRecord.getMinBufferSize( + sampleRateInHz, channelConfig, /*audioFormat=*/ AUDIO_ENCODING); + + // Set bufferSize. If the minimum buffer size permitted by the hardware is + // unavailable, use the sampleRateInHz value as the number of bytes. + // This is arguably better than another arbitrary constant because a higher + // value of sampleRateInHz implies the need for reading large chunks of data + // from the audio stream in each AudioRecord.read(...) call. 
+ if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) { + Log.e(TAG, "AudioRecord minBufferSize unavailable."); + bufferSize = sampleRateInHz * MAX_READ_INTERVAL_SEC * bytesPerSample * BUFFER_SIZE_MULTIPLIER; + } else { + bufferSize = minBufferSize * BUFFER_SIZE_MULTIPLIER; + } + } + + private void setupAudioRecord() { + audioData = new byte[bufferSize]; + + Log.d(TAG, "AudioRecord(" + sampleRateInHz + ", " + bufferSize + ")"); + audioRecord = + new AudioRecord.Builder() + .setAudioSource(AUDIO_SOURCE) + .setAudioFormat( + new AudioFormat.Builder() + .setEncoding(AUDIO_ENCODING) + .setSampleRate(sampleRateInHz) + .setChannelMask(channelConfig) + .build()) + .setBufferSizeInBytes(bufferSize) + .build(); + + if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { + audioRecord.release(); + Log.e(TAG, "AudioRecord could not open."); + return; + } + + recordingThread = + new Thread( + () -> { + android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO); + Log.v(TAG, "Running audio recording thread."); + + // Initial timestamp in case the AudioRecord.getTimestamp() function is unavailable. + long startTimestamp = initialTimestamp != UNINITIALIZED_TIMESTAMP + ? initialTimestamp + : System.nanoTime() / NANOS_PER_MICROS; + long sampleBasedTimestamp; + while (recording) { + if (audioRecord == null) { + break; + } + final int numBytesRead = + audioRecord.read(audioData, /*offsetInBytes=*/ 0, /*sizeInBytes=*/ bufferSize); + // If AudioRecord.getTimestamp() is unavailable, calculate the timestamp using the + // number of samples read in the call to AudioRecord.read(). 
+ long sampleBasedFallbackTimestamp = + startTimestamp + totalNumSamplesRead * MICROS_PER_SECOND / sampleRateInHz; + sampleBasedTimestamp = + getTimestamp(/*fallbackTimestamp=*/sampleBasedFallbackTimestamp); + if (numBytesRead <= 0) { + if (numBytesRead == AudioRecord.ERROR_INVALID_OPERATION) { + Log.e(TAG, "ERROR_INVALID_OPERATION"); + } else if (numBytesRead == AudioRecord.ERROR_BAD_VALUE) { + Log.e(TAG, "ERROR_BAD_VALUE"); + } + continue; + } + Log.v(TAG, "Read " + numBytesRead + " bytes of audio data."); + + // Confirm that the listener is still interested in receiving audio data and + // stopMicrophone() wasn't called. If the listener called stopMicrophone(), discard + // the data read in the latest AudioRecord.read(...) function call. + if (recording) { + onAudioDataAvailableListener.onAudioDataAvailable( + audioData.clone(), sampleBasedTimestamp); + } + + // TODO: Replace byte[] with short[] audioData. + // It is expected that audioRecord.read() will read full samples and therefore + // numBytesRead is expected to be a multiple of bytesPerSample. + int numSamplesRead = numBytesRead / bytesPerSample; + totalNumSamplesRead += numSamplesRead; + } + }); + } + + // If AudioRecord.getTimestamp() is available and returns without error, this function returns the + // timestamp using AudioRecord.getTimestamp(). If the function is unavailable, it returns a + // fallbackTimestamp provided as an argument to this method. + private long getTimestamp(long fallbackTimestamp) { + // AudioRecord.getTimestamp is only available at API Level 24 and above. + // https://developer.android.com/reference/android/media/AudioRecord.html#getTimestamp(android.media.AudioTimestamp,%20int). 
+ if (VERSION.SDK_INT >= VERSION_CODES.N) { + if (audioTimestamp == null) { + audioTimestamp = new AudioTimestamp(); + } + int status = audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC); + if (status == AudioRecord.SUCCESS) { + return audioTimestamp.nanoTime / NANOS_PER_MICROS; + } else { + Log.e(TAG, "audioRecord.getTimestamp failed with status: " + status); + } + } + return fallbackTimestamp; + } + + // Returns the buffer size read by this class per AudioRecord.read() call. + public int getBufferSize() { + return bufferSize; + } + + /** + * Overrides the use of system time as the source of timestamps for audio packets. Not + * recommended. Provided to maintain compatibility with existing usage by CameraRecorder. + */ + public void setInitialTimestamp(long initialTimestamp) { + this.initialTimestamp = initialTimestamp; + } + + // This method sets up a new AudioRecord object for reading audio data from the microphone. It + // can be called multiple times to restart the recording if necessary. + public void startMicrophone() { + if (recording) { + return; + } + + setupAudioRecord(); + audioRecord.startRecording(); + if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + Log.e(TAG, "AudioRecord couldn't start recording."); + audioRecord.release(); + return; + } + + recording = true; + totalNumSamplesRead = 0; + recordingThread.start(); + + Log.d(TAG, "AudioRecord is recording audio."); + } + + // Stops the AudioRecord object from reading data from the microphone and releases it. + public void stopMicrophone() { + stopMicrophoneWithoutCleanup(); + cleanup(); + Log.d(TAG, "AudioRecord stopped recording audio."); + } + + // Stops the AudioRecord object from reading data from the microphone. 
+ public void stopMicrophoneWithoutCleanup() { + if (!recording) { + return; + } + + recording = false; + try { + if (recordingThread != null) { + recordingThread.join(); + } + } catch (InterruptedException ie) { + Log.e(TAG, "Exception: ", ie); + } + + audioRecord.stop(); + if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) { + Log.e(TAG, "AudioRecord.stop() didn't run properly."); + } + } + + // Releases the AudioRecord object when there is no ongoing recording. + public void cleanup() { + if (recording) { + return; + } + audioRecord.release(); + } + + public void setOnAudioDataAvailableListener(@Nullable OnAudioDataAvailableListener listener) { + onAudioDataAvailableListener = listener; + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/PermissionHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/PermissionHelper.java new file mode 100644 index 000000000..976200988 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/PermissionHelper.java @@ -0,0 +1,93 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.mediapipe.components; + +import android.Manifest; +import android.app.Activity; +import android.content.pm.PackageManager; +import androidx.core.app.ActivityCompat; +import androidx.core.content.ContextCompat; +import android.util.Log; + +/** Manages camera permission request and handling. */ +public class PermissionHelper { + private static final String TAG = "PermissionHelper"; + + private static final String AUDIO_PERMISSION = Manifest.permission.RECORD_AUDIO; + + private static final String CAMERA_PERMISSION = Manifest.permission.CAMERA; + + private static final int REQUEST_CODE = 0; + + public static boolean permissionsGranted(Activity context, String[] permissions) { + for (String permission : permissions) { + int permissionStatus = ContextCompat.checkSelfPermission(context, permission); + if (permissionStatus != PackageManager.PERMISSION_GRANTED) { + return false; + } + } + return true; + } + + public static void checkAndRequestPermissions(Activity context, String[] permissions) { + if (!permissionsGranted(context, permissions)) { + ActivityCompat.requestPermissions(context, permissions, REQUEST_CODE); + } + } + + /** Called by context to check if camera permissions have been granted. */ + public static boolean cameraPermissionsGranted(Activity context) { + return permissionsGranted(context, new String[] {CAMERA_PERMISSION}); + } + + /** + * Called by context to check if camera permissions have been granted and if not, request them. + */ + public static void checkAndRequestCameraPermissions(Activity context) { + Log.d(TAG, "checkAndRequestCameraPermissions"); + checkAndRequestPermissions(context, new String[] {CAMERA_PERMISSION}); + } + + /** Called by context to check if audio permissions have been granted. 
*/ + public static boolean audioPermissionsGranted(Activity context) { + return permissionsGranted(context, new String[] {AUDIO_PERMISSION}); + } + + /** Called by context to check if audio permissions have been granted and if not, request them. */ + public static void checkAndRequestAudioPermissions(Activity context) { + Log.d(TAG, "checkAndRequestAudioPermissions"); + checkAndRequestPermissions(context, new String[] {AUDIO_PERMISSION}); + } + + /** Called by context when permissions request has been completed. */ + public static void onRequestPermissionsResult( + int requestCode, String[] permissions, int[] grantResults) { + Log.d(TAG, "onRequestPermissionsResult"); + if (permissions.length > 0 && grantResults.length != permissions.length) { + Log.d(TAG, "Permission denied."); + return; + } + for (int i = 0; i < grantResults.length; ++i) { + if (grantResults[i] == PackageManager.PERMISSION_GRANTED) { + Log.d(TAG, permissions[i] + " permission granted."); + } + } + // Note: We don't need any special callbacks when permissions are ready because activities + // using this helper class can have code in onResume() which is called after the + // permissions dialog box closes. The code can be branched depending on if permissions are + // available via permissionsGranted(Activity). 
// ==== TextureFrameConsumer.java ====

// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.mediapipe.components;

import com.google.mediapipe.framework.TextureFrame;

/** Lightweight abstraction for an object that can receive video frames. */
@FunctionalInterface
public interface TextureFrameConsumer {
  /**
   * Called when a new {@link TextureFrame} is available.
   *
   * @param frame the newly available frame; the consumer is responsible for
   *     releasing it when done (see {@link TextureFrame#release}).
   */
  // Note: interface members are implicitly public and abstract; the redundant
  // modifiers from the original declaration are dropped per Java convention.
  void onNewFrame(TextureFrame frame);
}

// ==== TextureFrameProcessor.java ====

// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.mediapipe.components;

/**
 * Lightweight abstraction for an object that can receive video frames, process them, and pass them
 * on to another object.
 */
public interface TextureFrameProcessor extends TextureFrameProducer, TextureFrameConsumer {}

// ==== TextureFrameProducer.java ====

// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.mediapipe.components;

/** Lightweight abstraction for an object that can produce video frames. */
public interface TextureFrameProducer {
  /**
   * Sets the consumer that receives the output from this producer.
   *
   * @param next the consumer that subsequent frames are delivered to
   */
  void setConsumer(TextureFrameConsumer next);
}
+ */ + public static byte[] getAssetBytes(AssetManager assets, String assetName) { + byte[] assetData; + try { + InputStream stream = assets.open(assetName); + assetData = ByteStreams.toByteArray(stream); + stream.close(); + } catch (IOException e) { + throw new RuntimeException(e); + } + return assetData; + } + + /** + * Initializes the native asset manager, which is used by native code to access assets directly. + * + *

Note: When possible, using {@link AssetCache} is preferred for portability, since it does + * not require any special handling for Android assets on the native code side. + */ + public static boolean initializeNativeAssetManager(Context androidContext) { + return nativeInitializeAssetManager( + androidContext, androidContext.getCacheDir().getAbsolutePath()); + } + + private static native boolean nativeInitializeAssetManager( + Context androidContext, String cacheDirPath); + + private AndroidAssetUtil() {} +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketCreator.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketCreator.java new file mode 100644 index 000000000..5e1a7a135 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketCreator.java @@ -0,0 +1,60 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import android.graphics.Bitmap; + +// TODO: use Preconditions in this file. +/** + * Android-specific subclass of PacketCreator. + * + *

See {@link PacketCreator} for general information. + * + *

This class contains methods that are Android-specific. You can (and should) use the base + * PacketCreator on Android if you do not need any methods from this class. + */ +public class AndroidPacketCreator extends PacketCreator { + public AndroidPacketCreator(Graph context) { + super(context); + } + + /** Creates a 3 channel RGB ImageFrame packet from a {@link Bitmap}. */ + public Packet createRgbImageFrame(Bitmap bitmap) { + if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) { + throw new RuntimeException("bitmap must use ARGB_8888 config."); + } + return Packet.create(nativeCreateRgbImageFrame(mediapipeGraph.getNativeHandle(), bitmap)); + } + + /** Creates a 4 channel RGBA ImageFrame packet from a {@link Bitmap}. */ + public Packet createRgbaImageFrame(Bitmap bitmap) { + if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) { + throw new RuntimeException("bitmap must use ARGB_8888 config."); + } + return Packet.create(nativeCreateRgbaImageFrame(mediapipeGraph.getNativeHandle(), bitmap)); + } + + /** + * Returns the native handle of a new internal::PacketWithContext object on success. Returns 0 on + * failure. + */ + private native long nativeCreateRgbImageFrame(long context, Bitmap bitmap); + + /** + * Returns the native handle of a new internal::PacketWithContext object on success. Returns 0 on + * failure. 
+ */ + private native long nativeCreateRgbaImageFrame(long context, Bitmap bitmap); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketGetter.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketGetter.java new file mode 100644 index 000000000..55357300e --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketGetter.java @@ -0,0 +1,69 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import android.graphics.Bitmap; +import com.google.common.flogger.FluentLogger; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +/** + * Android-specific subclass of PacketGetter. + * + *

See {@link PacketGetter} for general information. + * + *

This class contains methods that are Android-specific. + */ +public final class AndroidPacketGetter { + private static final FluentLogger logger = FluentLogger.forEnclosingClass(); + + /** Gets an {@code ARGB_8888} bitmap from an RGB mediapipe image frame packet. */ + public static Bitmap getBitmapFromRgb(Packet packet) { + int width = PacketGetter.getImageWidth(packet); + int height = PacketGetter.getImageHeight(packet); + ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4); + PacketGetter.getRgbaFromRgb(packet, buffer); + Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + bitmap.copyPixelsFromBuffer(buffer); + return bitmap; + } + + /** + * Gets an {@code ARGB_8888} bitmap from an RGBA mediapipe image frame packet. Returns null in + * case of failure. + */ + public static Bitmap getBitmapFromRgba(Packet packet) { + // TODO: unify into a single getBitmap call. + // TODO: use NDK Bitmap access instead of copyPixelsToBuffer. + int width = PacketGetter.getImageWidth(packet); + int height = PacketGetter.getImageHeight(packet); + ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4); + buffer.order(ByteOrder.nativeOrder()); + // Note: even though the Android Bitmap config is named ARGB_8888, the data + // is stored as RGBA internally. + boolean status = PacketGetter.getImageData(packet, buffer); + if (!status) { + logger.atSevere().log( + "Got error from getImageData, returning null Bitmap. 
Image width %d, height %d", + width, height); + return null; + } + Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + bitmap.copyPixelsFromBuffer(buffer); + return bitmap; + } + + private AndroidPacketGetter() {} +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AppTextureFrame.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AppTextureFrame.java new file mode 100644 index 000000000..20cb81982 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AppTextureFrame.java @@ -0,0 +1,157 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +/** + * A {@link TextureFrame} that represents a texture produced by the application. + * + *

// ==== AppTextureFrame.java (class body) ====

/**
 * A {@link TextureFrame} that represents a texture produced by the application.
 *
 * <p>The {@link #waitUntilReleased()} method can be used to wait for the consumer to be done with
 * the texture before destroying or overwriting it.
 *
 * <p>With this class, your application is the producer. The consumer can be MediaPipe (if you send
 * the frame into a MediaPipe graph using {@link PacketCreator#createGpuBuffer(TextureFrame)}) or
 * your application (if you just hand it to another part of your application without going through
 * MediaPipe).
 *
 * <p>Thread-safety: all state transitions are guarded by the instance monitor; waiters are
 * signalled via {@code notifyAll()} in the release methods.
 */
public class AppTextureFrame implements TextureFrame {
  private int textureName;   // GL texture id owned by the application.
  private int width;
  private int height;
  private long timestamp = Long.MIN_VALUE;  // sentinel: "no timestamp set yet"
  private boolean inUse = false;
  private boolean legacyInUse = false; // This ignores GL context sync.
  // Non-null while a consumer's GPU work may still be reading the texture.
  private GlSyncToken releaseSyncToken = null;

  public AppTextureFrame(int textureName, int width, int height) {
    this.textureName = textureName;
    this.width = width;
    this.height = height;
  }

  public void setTimestamp(long timestamp) {
    this.timestamp = timestamp;
  }

  @Override
  public int getTextureName() {
    return textureName;
  }

  @Override
  public int getWidth() {
    return width;
  }

  @Override
  public int getHeight() {
    return height;
  }

  @Override
  public long getTimestamp() {
    return timestamp;
  }

  /**
   * Waits until the consumer is done with the texture.
   *
   * <p>Blocks until either {@link #release()} clears {@code inUse}, or a sync token arrives via
   * {@link #release(GlSyncToken)}; in the latter case the GPU fence is waited on and the token
   * disposed before returning.
   *
   * @throws InterruptedException if the calling thread is interrupted while waiting.
   */
  public void waitUntilReleased() throws InterruptedException {
    synchronized (this) {
      while (inUse && releaseSyncToken == null) {
        wait();
      }
      if (releaseSyncToken != null) {
        releaseSyncToken.waitOnCpu();
        releaseSyncToken.release();
        inUse = false;
        releaseSyncToken = null;
      }
    }
  }

  /**
   * Returns whether the texture is currently in use.
   *
   * @deprecated this ignores cross-context sync. You should use {@link waitUntilReleased} instead,
   *     because cross-context sync cannot be supported efficiently using this API.
   */
  @Deprecated
  public boolean getInUse() {
    synchronized (this) {
      return legacyInUse;
    }
  }

  /**
   * Marks the texture as currently in use.
   *
   * <p>The producer calls this before handing the texture off to the consumer. Any stale sync
   * token from a previous hand-off is disposed here so it cannot satisfy the next wait early.
   */
  public void setInUse() {
    synchronized (this) {
      if (releaseSyncToken != null) {
        releaseSyncToken.release();
        releaseSyncToken = null;
      }
      inUse = true;
      legacyInUse = true;
    }
  }

  /**
   * Marks the texture as no longer in use.
   *
   * <p>The consumer calls this when it is done using the texture.
   */
  @Override
  public void release() {
    synchronized (this) {
      inUse = false;
      legacyInUse = false;
      notifyAll();
    }
  }

  /**
   * Called by MediaPipe when the texture has been released.
   *
   * <p>The sync token can be used to ensure that the GPU is done reading from the texture.
   */
  @Override
  public void release(GlSyncToken syncToken) {
    synchronized (this) {
      if (releaseSyncToken != null) {
        releaseSyncToken.release();
        releaseSyncToken = null;
      }
      releaseSyncToken = syncToken;
      // Note: we deliberately do not set inUse to false here. Clients should call
      // waitUntilReleased. See deprecation notice on getInUse.
      legacyInUse = false;
      notifyAll();
    }
  }

  @Override
  public void finalize() {
    // Note: we do not normally want to rely on finalize to dispose of native objects. In this
    // case, however, the object is normally disposed of in the wait method; the finalize method
    // serves as a fallback in case the application simply drops the object. The token object is
    // small, so even if its destruction is delayed, it's not a huge problem.
    // NOTE(review): this reads releaseSyncToken without synchronization, unlike every other
    // accessor in the class — presumably acceptable because finalization implies no other
    // references exist; confirm before relying on it.
    if (releaseSyncToken != null) {
      releaseSyncToken.release();
      releaseSyncToken = null;
    }
  }
}
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import android.content.Context; +import android.content.pm.PackageManager.NameNotFoundException; +import android.content.res.AssetManager; +import androidx.annotation.VisibleForTesting; +import android.text.TextUtils; +import com.google.common.base.Preconditions; +import com.google.common.flogger.FluentLogger; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import javax.annotation.Nullable; + +/** + * A singleton class to help accessing assets as normal files in native code. + * + *

This class extracts Android assets as files in a cache directory so that they can be accessed + * by code that expects a regular file path. + * + *

The cache is automatically purged when the versionCode in the app's manifest changes, to avoid + * using stale assets. If a versionCode is not specified, the cache is disabled. + */ +public class AssetCache { + + private static final FluentLogger logger = FluentLogger.forEnclosingClass(); + @VisibleForTesting static final String MEDIAPIPE_ASSET_CACHE_DIR = "mediapipe_asset_cache"; + private static AssetCache assetCache; + private int appVersionCode; + private AssetCacheDbHelper versionDatabase; + private Context context; + + /** + * Create {@link AssetCache} with an Android context. + * + *

Asset manager needs context to access the asset files. {@link Create} can be called in the + * main activity. + */ + public static synchronized AssetCache create(Context context) { + Preconditions.checkNotNull(context); + if (assetCache == null) { + assetCache = new AssetCache(context); + } + return assetCache; + } + + /** + * Purge the cached assets. + * + *

This should only be needed in local dev builds that do not update the versionCode in the + * app's manifest. + */ + public static synchronized void purgeCache(Context context) { + AssetCacheDbHelper dbHelper = new AssetCacheDbHelper(context); + dbHelper.invalidateCache(-1); + dbHelper.close(); + } + + /** + * Get {@link AssetCache} without context. + * + *

If not created, {@code null} is returned. + */ + @Nullable + public static synchronized AssetCache getAssetCache() { + return assetCache; + } + + /** + * Loads all the assets in a given assets path. + * @param assetsPath the assets path from which to load. + */ + public synchronized void loadAllAssets(String assetsPath) { + Preconditions.checkNotNull(assetsPath); + + AssetManager assetManager = context.getAssets(); + String[] assetFiles = null; + try { + assetFiles = assetManager.list(assetsPath); + } catch (IOException e) { + logger.atSevere().withCause(e).log("Unable to get files in assets path: %s", assetsPath); + } + if (assetFiles == null || assetFiles.length == 0) { + logger.atWarning().log("No files to load"); + return; + } + + for (String file : assetFiles) { + // If a path was specified, prepend it to the filename with "/", otherwise, just + // use the file name. + String path = TextUtils.isEmpty(assetsPath) ? file : assetsPath + "/" + file; + getAbsolutePathFromAsset(path); + } + } + + /** + * Get the absolute path for an asset file. + * + *

The asset will be unpacked to the application's files directory if not already done. + * + * @param assetPath path to a file under asset. + * @return the absolute file system path to the unpacked asset file. + */ + public synchronized String getAbsolutePathFromAsset(String assetPath) { + AssetManager assetManager = context.getAssets(); + File destinationDir = getDefaultMediaPipeCacheDir(); + destinationDir.mkdir(); + File assetFile = new File(assetPath); + String assetName = assetFile.getName(); + File destinationFile = new File(destinationDir.getPath(), assetName); + // If app version code is not defined, we don't use cache. + if (destinationFile.exists() && appVersionCode != 0 + && versionDatabase.checkVersion(assetPath, appVersionCode)) { + return destinationFile.getAbsolutePath(); + } + InputStream inStream = null; + try { + inStream = assetManager.open(assetPath); + writeStreamToFile(inStream, destinationFile); + } catch (IOException ioe) { + logger.atSevere().log("Unable to unpack: %s", assetPath); + try { + if (inStream != null) { + inStream.close(); + } + } catch (IOException ioe2) { + return null; + } + return null; + } + // If app version code is not defined, we don't use cache. + if (appVersionCode != 0) { + versionDatabase.insertAsset(assetPath, destinationFile.getAbsolutePath(), appVersionCode); + } + return destinationFile.getAbsolutePath(); + } + + /** + * Return all the file names of the assets that were saved to cache from the application's + * resources. + */ + public synchronized String[] getAvailableAssets() { + File assetsDir = getDefaultMediaPipeCacheDir(); + if (assetsDir.exists()) { + return assetsDir.list(); + } + return new String[0]; + } + + /** + * Returns the default cache directory used by the AssetCache to store the assets. 
+ */ + public File getDefaultMediaPipeCacheDir() { + return new File(context.getCacheDir(), MEDIAPIPE_ASSET_CACHE_DIR); + } + + private AssetCache(Context context) { + this.context = context; + versionDatabase = new AssetCacheDbHelper(context); + try { + appVersionCode = context.getPackageManager() + .getPackageInfo(context.getPackageName(), 0).versionCode; + logger.atInfo().log("Current app version code: %d", appVersionCode); + } catch (NameNotFoundException e) { + throw new RuntimeException("Can't get app version code.", e); + } + // Remove all the cached items that don't agree with the current app version. + versionDatabase.invalidateCache(appVersionCode); + } + + private static void writeStreamToFile(InputStream inStream, File destinationFile) + throws IOException { + final int bufferSize = 1000; + FileOutputStream outStream = null; + try { + outStream = new FileOutputStream(destinationFile); + byte[] buffer = new byte[bufferSize]; + while (true) { + int n = inStream.read(buffer); + if (n == -1) { + break; + } + outStream.write(buffer, 0, n); + } + } finally { + if (outStream != null) { + outStream.close(); + } + } + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AssetCacheDbHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AssetCacheDbHelper.java new file mode 100644 index 000000000..55c628088 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AssetCacheDbHelper.java @@ -0,0 +1,175 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import android.content.ContentValues; +import android.content.Context; +import android.database.Cursor; +import android.database.sqlite.SQLiteDatabase; +import android.database.sqlite.SQLiteOpenHelper; +import android.provider.BaseColumns; +import com.google.common.flogger.FluentLogger; +import java.io.File; + +/** + * Database to keep the cached version of asset valid. + */ +public class AssetCacheDbHelper extends SQLiteOpenHelper { + private static final FluentLogger logger = FluentLogger.forEnclosingClass(); + + public static final int DATABASE_VERSION = 2; + public static final String DATABASE_NAME = "mediapipe.db"; + + private static final String INT_TYPE = " INTEGER"; + private static final String TEXT_TYPE = " TEXT"; + private static final String TEXT_UNIQUE_TYPE = " TEXT NOT NULL UNIQUE"; + private static final String COMMA_SEP = ","; + private static final String SQL_CREATE_TABLE = + "CREATE TABLE " + AssetCacheEntry.TABLE_NAME + " (" + + AssetCacheEntry._ID + " INTEGER PRIMARY KEY," + + AssetCacheEntry.COLUMN_NAME_ASSET + TEXT_UNIQUE_TYPE + COMMA_SEP + + AssetCacheEntry.COLUMN_NAME_CACHE_PATH + TEXT_TYPE + COMMA_SEP + + AssetCacheEntry.COLUMN_NAME_VERSION + INT_TYPE + " )"; + + private static final String SQL_DELETE_TABLE = + "DROP TABLE IF EXISTS " + AssetCacheEntry.TABLE_NAME; + + /** + * The columns in the AssetVersion table. 
+ */ + public abstract static class AssetCacheEntry implements BaseColumns { + public static final String TABLE_NAME = "AssetVersion"; + public static final String COLUMN_NAME_ASSET = "asset"; + public static final String COLUMN_NAME_CACHE_PATH = "cache_path"; + public static final String COLUMN_NAME_VERSION = "version"; + } + + public AssetCacheDbHelper(Context context) { + super(context, DATABASE_NAME, null, DATABASE_VERSION); + } + + /** + * Check if the cached version is current in database. + * + * @return true if the asset is cached and the app is not upgraded. Otherwise return false. + */ + public boolean checkVersion(String assetPath, int currentAppVersion) { + SQLiteDatabase db = getReadableDatabase(); + String selection = AssetCacheEntry.COLUMN_NAME_ASSET + " = ?"; + String[] projection = {AssetCacheEntry.COLUMN_NAME_VERSION}; + String[] selectionArgs = {assetPath}; + + Cursor cursor = queryAssetCacheTable(db, projection, selection, selectionArgs); + + if (cursor.getCount() == 0) { + return false; + } + + cursor.moveToFirst(); + int cachedVersion = cursor.getInt( + cursor.getColumnIndexOrThrow(AssetCacheEntry.COLUMN_NAME_VERSION)); + cursor.close(); + return cachedVersion == currentAppVersion; + } + + /** + * Remove all entries in the version table that don't have the correct version. + * + *

Invalidates all cached asset contents that doesn't have the specified version. + */ + public void invalidateCache(int currentAppVersion) { + SQLiteDatabase db = getWritableDatabase(); + String selection = AssetCacheEntry.COLUMN_NAME_VERSION + " != ?"; + String[] selectionArgs = {Integer.toString(currentAppVersion)}; + // Remve the cached files. + removeCachedFiles(db, selection, selectionArgs); + // Remve the rows in the table. + db.delete(AssetCacheEntry.TABLE_NAME, selection, selectionArgs); + } + + /** + * Insert the cached version of the asset into the database. + */ + public void insertAsset(String asset, String cachePath, int appVersion) { + SQLiteDatabase db = getWritableDatabase(); + // Remove the old cached file first if they are different from the new cachePath. + String selection = AssetCacheEntry.COLUMN_NAME_ASSET + " = ? and " + + AssetCacheEntry.COLUMN_NAME_CACHE_PATH + " != ?"; + String[] selectionArgs = {asset, cachePath}; + removeCachedFiles(db, selection, selectionArgs); + + ContentValues values = new ContentValues(); + values.put(AssetCacheEntry.COLUMN_NAME_ASSET, asset); + values.put(AssetCacheEntry.COLUMN_NAME_CACHE_PATH, cachePath); + values.put(AssetCacheEntry.COLUMN_NAME_VERSION, appVersion); + long newRowId = db.insertWithOnConflict( + AssetCacheEntry.TABLE_NAME, + null, + values, + SQLiteDatabase.CONFLICT_REPLACE); + // According to documentation, -1 means any error. + if (newRowId == -1) { + throw new RuntimeException("Can't insert entry into the mediapipe db."); + } + } + + @Override + public void onCreate(SQLiteDatabase db) { + db.execSQL(SQL_CREATE_TABLE); + } + + @Override + public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { + // Since version 1 doesn't have the path in the table, just upgrade the table. 
+ db.execSQL(SQL_DELETE_TABLE); + onCreate(db); + } + + @Override + public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) { + onUpgrade(db, oldVersion, newVersion); + } + + private Cursor queryAssetCacheTable( + SQLiteDatabase db, String[] projection, String selection, String[] selectionArgs) { + return db.query( + AssetCacheEntry.TABLE_NAME, // The table to query + projection, // The columns to return + selection, // The columns for the WHERE clause + selectionArgs, // The values for the WHERE clause + null, // don't group the rows + null, // don't filter by row groups + null // The sort order + ); + } + + private void removeCachedFiles(SQLiteDatabase db, String selection, String[] selectionArgs) { + String[] projection = {AssetCacheEntry.COLUMN_NAME_CACHE_PATH}; + Cursor cursor = queryAssetCacheTable(db, projection, selection, selectionArgs); + if (cursor.moveToFirst()) { + do { + String cachedPath = cursor.getString( + cursor.getColumnIndexOrThrow(AssetCacheEntry.COLUMN_NAME_CACHE_PATH)); + File file = new File(cachedPath); + if (file.exists()) { + if (!file.delete()) { + logger.atWarning().log("Stale cached file: %s can't be deleted.", cachedPath); + } + } + } while (cursor.moveToNext()); + } + cursor.close(); + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/BUILD new file mode 100644 index 000000000..e6ad76ed9 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/BUILD @@ -0,0 +1,84 @@ +# Copyright 2019 The MediaPipe Authors. 
# ==== BUILD (Bazel, MediaPipe Android framework) ====

# Copyright 2019 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

licenses(["notice"])  # Apache 2.0

# MediaPipe Android framework.

exports_files(["proguard.pgcfg"])

# Umbrella target: re-exports the no-MFF layer plus the Android-agnostic core.
android_library(
    name = "android_framework",
    proguard_specs = [
        ":proguard.pgcfg",
    ],
    visibility = ["//visibility:public"],
    exports = [
        ":android_core",
        ":android_framework_no_mff",
    ],
)

# TODO: Rename android_framework_no_mff.
# Android-specific sources (Android*.java, asset cache, runner) layered on top
# of :android_core.
android_library(
    name = "android_framework_no_mff",
    srcs = glob(
        ["Android*.java"],
    ) + [
        "AssetCache.java",
        "AssetCacheDbHelper.java",
        "MediaPipeRunner.java",
    ],
    proguard_specs = [
        ":proguard.pgcfg",
    ],
    exports = [
        ":android_core",
    ],
    deps = [
        ":android_core",
        "//third_party:androidx_annotation",
        "//third_party:androidx_legacy_support_v4",
        "@com_google_code_findbugs//jar",
        "@com_google_common_flogger//jar",
        "@com_google_common_flogger_system_backend//jar",
        "@com_google_guava_android//jar",
    ],
)

# This is the Android version of "framework".
# TODO: unify once allowed by bazel.
# Note: this is not called "android_framework" for historical reasons (that target
# also includes other libraries).
android_library(
    name = "android_core",
    srcs = glob(
        ["**/*.java"],
        exclude = [
            "Android*",
            "AssetCache.java",
            "AssetCacheDbHelper.java",
            "MediaPipeRunner.java",
        ],
    ),
    deps = [
        "//mediapipe/framework:calculator_java_proto_lite",
        "//mediapipe/framework:calculator_profile_java_proto_lite",
        "//mediapipe/framework/tool:calculator_graph_template_java_proto_lite",
        "@com_google_code_findbugs//jar",
        "@com_google_common_flogger//jar",
        "@com_google_common_flogger_system_backend//jar",
        "@com_google_guava_android//jar",
    ],
)
Can be used as a + * replacement for EGL14.eglGetCurrentContext().getNativeHandle() before + * API 17. + */ + public static native long getCurrentNativeEGLContext(); + + /** + * Returns the native handle to the current EGL surface. Can be used as a + * replacement for EGL14.eglGetCurrentSurface().getNativeHandle() before + * API 17. + */ + public static native long getCurrentNativeEGLSurface(int readdraw); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/DummyAndroidManifest.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/DummyAndroidManifest.xml new file mode 100644 index 000000000..5c6267293 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/DummyAndroidManifest.xml @@ -0,0 +1,2 @@ + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GlSyncToken.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GlSyncToken.java new file mode 100644 index 000000000..d32faaf13 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GlSyncToken.java @@ -0,0 +1,37 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +/** + * Represents a synchronization point for OpenGL operations. This can be needed when working with + * multiple GL contexts. + */ +public interface GlSyncToken { + /** + * Waits until the GPU has executed all commands up to the sync point. This blocks the CPU, and + * ensures the commands are complete from the point of view of all threads and contexts. + */ + void waitOnCpu(); + + /** + * Ensures that the following commands on the current OpenGL context will not be executed until + * the sync point has been reached. This does not block the CPU, and only affects the current + * OpenGL context. + */ + void waitOnGpu(); + + /** Releases the underlying native object. */ + void release(); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Graph.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Graph.java new file mode 100644 index 000000000..8b871cc8c --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Graph.java @@ -0,0 +1,658 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import com.google.common.base.Preconditions; +import com.google.common.flogger.FluentLogger; +import com.google.mediapipe.proto.CalculatorProto.CalculatorGraphConfig; +import com.google.protobuf.InvalidProtocolBufferException; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +//import com.google.mediapipe.proto.GraphTemplateProto.CalculatorGraphTemplate; +//import com.google.protobuf.InvalidProtocolBufferException; + +/** + * MediaPipe-related context. + * + *

Main purpose is to faciliate the memory management for native allocated mediapipe objects. + */ +public class Graph { + private static final FluentLogger logger = FluentLogger.forEnclosingClass(); + private static final int MAX_BUFFER_SIZE = 20; + private long nativeGraphHandle; + // Hold the references to callbacks. + private final List packetCallbacks = new ArrayList<>(); + private final List packetWithHeaderCallbacks = new ArrayList<>(); + // Side packets used for running the graph. + private Map sidePackets = new HashMap<>(); + // Stream headers used for running the graph. + private Map streamHeaders = new HashMap<>(); + // The mode of running used by this context. + // Based on the value of this mode, the caller can use {@link waitUntilIdle} to synchronize with + // the mediapipe native graph runner. + private boolean stepMode = false; + + private boolean startRunningGraphCalled = false; + private boolean graphRunning = false; + + /** Helper class for a buffered Packet and its timestamp. */ + private static class PacketBufferItem { + private PacketBufferItem(Packet packet, Long timestamp) { + this.packet = packet; + this.timestamp = timestamp; + } + + final Packet packet; + final Long timestamp; + } + + private Map> packetBuffers = new HashMap<>(); + + // This is used for methods that need to ensure the native context is alive + // while still allowing other methods of this class to execute concurrently. + // Note: if a method needs to acquire both this lock and the Graph intrinsic monitor, + // it must acquire the intrinsic monitor first. 
+ private final Object terminationLock = new Object(); + + public Graph() { + nativeGraphHandle = nativeCreateGraph(); + } + + public synchronized long getNativeHandle() { + return nativeGraphHandle; + } + + public synchronized void setStepMode(boolean stepMode) { + this.stepMode = stepMode; + } + + public synchronized boolean getStepMode() { + return stepMode; + } + + /** + * Loads a binary mediapipe graph using an absolute file path. + * + * @param path An absolute file path to a mediapipe graph. An absolute file path can be obtained + * from asset file using {@link AssetCache}. + */ + public synchronized void loadBinaryGraph(String path) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + nativeLoadBinaryGraph(nativeGraphHandle, path); + } + + /** Loads a binary mediapipe graph from a byte array. */ + public synchronized void loadBinaryGraph(byte[] data) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + nativeLoadBinaryGraphBytes(nativeGraphHandle, data); + } + + /** Specifies a CalculatorGraphConfig for a mediapipe graph or subgraph. */ + public synchronized void loadBinaryGraph(CalculatorGraphConfig config) { + loadBinaryGraph(config.toByteArray()); + } + + /** Specifies a CalculatorGraphTemplate for a mediapipe graph or subgraph. + public synchronized void loadBinaryGraphTemplate(CalculatorGraphTemplate template) { + nativeLoadBinaryGraphTemplate(nativeGraphHandle, template.toByteArray()); + } + + /** Specifies the CalculatorGraphConfig::type of the top level graph. */ + public synchronized void setGraphType(String graphType) { + nativeSetGraphType(nativeGraphHandle, graphType); + } + + /** Specifies options such as template arguments for the graph. 
*/ + public synchronized void setGraphOptions(CalculatorGraphConfig.Node options) { + nativeSetGraphOptions(nativeGraphHandle, options.toByteArray()); + } + + /** + * Returns the canonicalized CalculatorGraphConfig with subgraphs and graph templates expanded. + */ + public synchronized CalculatorGraphConfig getCalculatorGraphConfig() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + byte[] data = nativeGetCalculatorGraphConfig(nativeGraphHandle); + if (data != null) { + try { + return CalculatorGraphConfig.parseFrom(data); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); + } + } + return null; + } + + /** + * Adds a {@link PacketCallback} to the context for callback during graph running. + * + * @param streamName The output stream name in the graph for callback. + * @param callback The callback for handling the call when output stream gets a {@link Packet}. + * @throws MediaPipeException for any error status. + */ + public synchronized void addPacketCallback(String streamName, PacketCallback callback) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + Preconditions.checkNotNull(streamName); + Preconditions.checkNotNull(callback); + Preconditions.checkState(!graphRunning && !startRunningGraphCalled); + packetCallbacks.add(callback); + nativeAddPacketCallback(nativeGraphHandle, streamName, callback); + } + + /** + * Adds a {@link PacketWithHeaderCallback} to the context for callback during graph running. + * + * @param streamName The output stream name in the graph for callback. + * @param callback The callback for handling the call when output stream gets a {@link Packet} and + * has a stream header. + * @throws MediaPipeException for any error status. 
+ */ + public synchronized void addPacketWithHeaderCallback( + String streamName, PacketWithHeaderCallback callback) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + Preconditions.checkNotNull(streamName); + Preconditions.checkNotNull(callback); + Preconditions.checkState(!graphRunning && !startRunningGraphCalled); + packetWithHeaderCallbacks.add(callback); + nativeAddPacketWithHeaderCallback(nativeGraphHandle, streamName, callback); + } + + /** + * Adds a {@link SurfaceOutput} for a stream producing GpuBuffers. + * + *

Multiple outputs can be attached to the same stream. + * + * @param streamName The output stream name in the graph. + * @result a new SurfaceOutput. + */ + public synchronized SurfaceOutput addSurfaceOutput(String streamName) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + Preconditions.checkNotNull(streamName); + Preconditions.checkState(!graphRunning && !startRunningGraphCalled); + // TODO: check if graph is loaded. + return new SurfaceOutput( + this, Packet.create(nativeAddSurfaceOutput(nativeGraphHandle, streamName))); + } + + /** + * Sets the input side packets needed for running the graph. + * + * @param sidePackets MediaPipe input side packet name to {@link Packet} map. + */ + public synchronized void setInputSidePackets(Map sidePackets) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + Preconditions.checkState(!graphRunning && !startRunningGraphCalled); + for (Map.Entry entry : sidePackets.entrySet()) { + this.sidePackets.put(entry.getKey(), entry.getValue().copy()); + } + } + + public synchronized void setServiceObject(GraphService service, T object) { + service.installServiceObject(nativeGraphHandle, object); + } + + /** + * This tells the {@link Graph} before running the graph, we are expecting those headers to be + * available first. This function is usually called before the streaming starts. + * + *

Note: Because of some MediaPipe calculators need statically available header info before the + * graph is running, we need to have this to synchronize the running of graph with the + * availability of the header streams. + */ + public synchronized void addStreamNameExpectingHeader(String streamName) { + Preconditions.checkState(!graphRunning && !startRunningGraphCalled); + streamHeaders.put(streamName, null); + } + + /** + * Sets the stream header for specific stream if the header is not set. + * + *

If graph is already waiting for being started, start graph when all stream headers are set. + * + *

Note: If streamHeader is already being set, this call will not override the previous set + * value. To override, call the function below instead. + */ + public synchronized void setStreamHeader(String streamName, Packet streamHeader) { + setStreamHeader(streamName, streamHeader, false); + } + + /** + * Sets the stream header for specific stream. + * + *

If graph is already waiting for being started, start graph when all stream headers are set. + * + * @param override if true, override the previous set header, however, if graph is running, {@link + * IllegalArgumentException} will be thrown. + */ + public synchronized void setStreamHeader( + String streamName, Packet streamHeader, boolean override) { + Packet header = streamHeaders.get(streamName); + if (header != null) { + if (override) { + if (graphRunning) { + throw new IllegalArgumentException( + "Can't override an existing stream header, after graph started running."); + } + header.release(); + } else { + // Don't override, so just return since header is set already. + return; + } + } + streamHeaders.put(streamName, streamHeader.copy()); + if (!graphRunning && startRunningGraphCalled && hasAllStreamHeaders()) { + startRunningGraph(); + } + } + + /** + * Runs the mediapipe graph until it finishes. + * + *

Side packets that are needed by the graph should be set using {@link setInputSidePackets}. + * @throws MediaPipeException for any error status. + */ + public synchronized void runGraphUntilClose() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + Preconditions.checkNotNull(sidePackets); + String[] streamNames = new String[sidePackets.size()]; + long[] packets = new long[sidePackets.size()]; + splitStreamNamePacketMap(sidePackets, streamNames, packets); + nativeRunGraphUntilClose(nativeGraphHandle, streamNames, packets); + } + + /** + * Starts running the MediaPipe graph. + * + *

Returns immediately after starting the scheduler. + * + *

Side packets that are needed by the graph should be set using {@link setInputSidePackets}. + * @throws MediaPipeException for any error status. + */ + public synchronized void startRunningGraph() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + startRunningGraphCalled = true; + if (!hasAllStreamHeaders()) { + // Graph will be runned later when all stream headers are assembled. + logger.atInfo().log("MediaPipe graph won't start until all stream headers are available."); + return; + } + // Prepare the side packets. + String[] sidePacketNames = new String[sidePackets.size()]; + long[] sidePacketHandles = new long[sidePackets.size()]; + splitStreamNamePacketMap(sidePackets, sidePacketNames, sidePacketHandles); + // Prepare the Stream headers. + String[] streamNamesWithHeader = new String[streamHeaders.size()]; + long[] streamHeaderHandles = new long[streamHeaders.size()]; + splitStreamNamePacketMap(streamHeaders, streamNamesWithHeader, streamHeaderHandles); + nativeStartRunningGraph( + nativeGraphHandle, + sidePacketNames, + sidePacketHandles, + streamNamesWithHeader, + streamHeaderHandles); + // Packets can be buffered before the actual mediapipe graph starts. Send them in now, if we + // started successfully. + graphRunning = true; + moveBufferedPacketsToInputStream(); + } + + /** + * Sets blocking behavior when adding packets to a graph input stream via {@link + * addPacketToInputStream}. If set to true, the method will block until all dependent input + * streams fall below the maximum queue size set in the graph config. If false, it will return and + * not add a packet if any dependent input stream is full. To add a packet unconditionally, set + * the maximum queue size to -1 in the graph config. 
+ */ + public synchronized void setGraphInputStreamBlockingMode(boolean mode) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + Preconditions.checkState(!graphRunning); + nativeSetGraphInputStreamBlockingMode(nativeGraphHandle, mode); + } + + /** + * Adds one packet into a graph input stream based on the graph stream input mode. + * + * @param streamName the name of the input stream. + * @param packet the mediapipe packet. + * @param timestamp the timestamp of the packet, although not enforced, the unit is normally + * microsecond. + * @throws MediaPipeException for any error status. + */ + public synchronized void addPacketToInputStream( + String streamName, Packet packet, long timestamp) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + if (!graphRunning) { + addPacketToBuffer(streamName, packet.copy(), timestamp); + } else { + nativeAddPacketToInputStream( + nativeGraphHandle, streamName, packet.getNativeHandle(), timestamp); + } + } + + /** + * Adds one packet into a graph input stream based on the graph stream input mode. Also + * simultaneously yields ownership over to the graph stream, so additional memory optimizations + * are possible. When the function ends normally, the packet will be consumed and should no longer + * be referenced. When the function ends with MediaPipeException, the packet will remain + * unaffected, so this call may be retried later. + * + * @param streamName the name of the input stream. + * @param packet the mediapipe packet. + * @param timestamp the timestamp of the packet, although not enforced, the unit is normally + * microsecond. + * @throws MediaPipeException for any error status. 
+ */ + public synchronized void addConsumablePacketToInputStream( + String streamName, Packet packet, long timestamp) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + if (!graphRunning) { + addPacketToBuffer(streamName, packet.copy(), timestamp); + // Release current packet to honor move semantics. + packet.release(); + } else { + + // We move the packet here into native, allowing it to take full control. + nativeMovePacketToInputStream( + nativeGraphHandle, streamName, packet.getNativeHandle(), timestamp); + // The Java handle is released now if the packet was successfully moved. Otherwise the Java + // handle continues to own the packet contents. + packet.release(); + } + } + + /** + * Closes the specified input stream. + * @throws MediaPipeException for any error status. + */ + public synchronized void closeInputStream(String streamName) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + nativeCloseInputStream(nativeGraphHandle, streamName); + } + + /** + * Closes all the input streams in the mediapipe graph. + * @throws MediaPipeException for any error status. + */ + public synchronized void closeAllInputStreams() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + nativeCloseAllInputStreams(nativeGraphHandle); + } + + /** + * Closes all the input streams and source calculators in the mediapipe graph. + * @throws MediaPipeException for any error status. + */ + public synchronized void closeAllPacketSources() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + nativeCloseAllPacketSources(nativeGraphHandle); + } + + /** + * Waits until the graph is done processing. + * + *

This should be called after all sources and input streams are closed. + * @throws MediaPipeException for any error status. + */ + public synchronized void waitUntilGraphDone() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + nativeWaitUntilGraphDone(nativeGraphHandle); + } + + /** + * Waits until the graph runner is idle. + * @throws MediaPipeException for any error status. + */ + public synchronized void waitUntilGraphIdle() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called."); + nativeWaitUntilGraphIdle(nativeGraphHandle); + } + + /** Releases the native mediapipe context. */ + public synchronized void tearDown() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + for (Map.Entry entry : sidePackets.entrySet()) { + entry.getValue().release(); + } + sidePackets.clear(); + for (Map.Entry entry : streamHeaders.entrySet()) { + if (entry.getValue() != null) { + entry.getValue().release(); + } + } + streamHeaders.clear(); + for (Map.Entry> entry : packetBuffers.entrySet()) { + for (PacketBufferItem item : entry.getValue()) { + item.packet.release(); + } + } + packetBuffers.clear(); + synchronized (terminationLock) { + if (nativeGraphHandle != 0) { + nativeReleaseGraph(nativeGraphHandle); + nativeGraphHandle = 0; + } + } + packetCallbacks.clear(); + packetWithHeaderCallbacks.clear(); + } + + /** + * Updates the value of a MediaPipe packet that holds a reference to another MediaPipe packet. + * + *

This updates a mutable packet. Useful for the caluclator that needs to have an external way + * of updating the parameters using input side packets. + * + *

After calling this, the newPacket can be released (calling newPacket.release()), if no + * longer need to use it in Java. The {@code referencePacket} already holds the reference. + * + * @param referencePacket a mediapipe packet that has the value type Packet*. + * @param newPacket the new value for the reference packet to hold. + */ + public synchronized void updatePacketReference(Packet referencePacket, Packet newPacket) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + nativeUpdatePacketReference( + referencePacket.getNativeHandle(), newPacket.getNativeHandle()); + } + + /** + * Creates a shared GL runner with the specified name so that MediaPipe calculators can use + * OpenGL. This runner should be connected to the calculators by specifiying an input side packet + * in the graph file with the same name. + * + * @throws MediaPipeException for any error status. + * @deprecated Call {@link setParentGlContext} to set up texture sharing between contexts. Apart + * from that, GL is set up automatically. + */ + @Deprecated + public synchronized void createGlRunner(String name, long javaGlContext) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + Preconditions.checkArgument(name.equals("gpu_shared")); + setParentGlContext(javaGlContext); + } + + /** + * Specifies an external GL context to use as the parent of MediaPipe's GL context. This will + * enable the sharing of textures and other objects between the two contexts. + * + *

Cannot be called after the graph has been started. + * @throws MediaPipeException for any error status. + */ + public synchronized void setParentGlContext(long javaGlContext) { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + Preconditions.checkState(!graphRunning); + nativeSetParentGlContext(nativeGraphHandle, javaGlContext); + } + + /** + * Cancels the running graph. + */ + public synchronized void cancelGraph() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + nativeCancelGraph(nativeGraphHandle); + } + + /** Returns {@link GraphProfiler}. */ + public GraphProfiler getProfiler() { + Preconditions.checkState( + nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); + return new GraphProfiler(nativeGetProfiler(nativeGraphHandle), this); + } + + private boolean addPacketToBuffer(String streamName, Packet packet, long timestamp) { + if (!packetBuffers.containsKey(streamName)) { + packetBuffers.put(streamName, new ArrayList()); + } + List buffer = packetBuffers.get(streamName); + if (buffer.size() > MAX_BUFFER_SIZE) { + for (Map.Entry entry : streamHeaders.entrySet()) { + if (entry.getValue() == null) { + logger.atSevere().log("Stream: %s might be missing.", entry.getKey()); + } + } + throw new RuntimeException("Graph is not started because of missing streams"); + } + buffer.add(new PacketBufferItem(packet, timestamp)); + return true; + } + + // Any previously-buffered packets should be passed along to our graph. They've already been + // copied into our buffers, so it's fine to move them all over to native. 
+ private void moveBufferedPacketsToInputStream() { + if (!packetBuffers.isEmpty()) { + for (Map.Entry> entry : packetBuffers.entrySet()) { + for (PacketBufferItem item : entry.getValue()) { + try { + nativeMovePacketToInputStream( + nativeGraphHandle, entry.getKey(), item.packet.getNativeHandle(), item.timestamp); + } catch (MediaPipeException e) { + logger.atSevere().log( + "AddPacket for stream: %s failed: %s.", entry.getKey(), e.getMessage()); + throw e; + } + // Need to release successfully moved packets + item.packet.release(); + } + } + packetBuffers.clear(); + } + } + + private static void splitStreamNamePacketMap( + Map namePacketMap, String[] streamNames, long[] packets) { + if (namePacketMap.size() != streamNames.length || namePacketMap.size() != packets.length) { + throw new RuntimeException("Input array length doesn't match the map size!"); + } + int i = 0; + for (Map.Entry entry : namePacketMap.entrySet()) { + streamNames[i] = entry.getKey(); + packets[i] = entry.getValue().getNativeHandle(); + ++i; + } + } + + private boolean hasAllStreamHeaders() { + for (Map.Entry entry : streamHeaders.entrySet()) { + if (entry.getValue() == null) { + return false; + } + } + return true; + } + + private native long nativeCreateGraph(); + + private native void nativeReleaseGraph(long context); + + private native void nativeAddPacketCallback( + long context, String streamName, PacketCallback callback); + + private native void nativeAddPacketWithHeaderCallback( + long context, String streamName, PacketWithHeaderCallback callback); + + private native long nativeAddSurfaceOutput(long context, String streamName); + + private native void nativeLoadBinaryGraph(long context, String path); + + private native void nativeLoadBinaryGraphBytes(long context, byte[] data); + + private native void nativeLoadBinaryGraphTemplate(long context, byte[] data); + + private native void nativeSetGraphType(long context, String graphType); + + private native void nativeSetGraphOptions(long 
context, byte[] data); + + private native byte[] nativeGetCalculatorGraphConfig(long context); + + private native void nativeRunGraphUntilClose(long context, String[] streamNames, long[] packets); + + private native void nativeStartRunningGraph( + long context, + String[] sidePacketNames, + long[] sidePacketHandles, + String[] streamNamesWithHeader, + long[] streamHeaderHandles); + + private native void nativeAddPacketToInputStream( + long context, String streamName, long packet, long timestamp); + + private native void nativeMovePacketToInputStream( + long context, String streamName, long packet, long timestamp); + + private native void nativeSetGraphInputStreamBlockingMode(long context, boolean mode); + + private native void nativeCloseInputStream(long context, String streamName); + + private native void nativeCloseAllInputStreams(long context); + + private native void nativeCloseAllPacketSources(long context); + + private native void nativeWaitUntilGraphDone(long context); + + private native void nativeWaitUntilGraphIdle(long context); + + private native void nativeUpdatePacketReference(long referencePacket, long newPacket); + + private native void nativeSetParentGlContext(long context, long javaGlContext); + + private native void nativeCancelGraph(long context); + + private native long nativeGetProfiler(long context); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphGlSyncToken.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphGlSyncToken.java new file mode 100644 index 000000000..c141a95aa --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphGlSyncToken.java @@ -0,0 +1,56 
@@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +/** + * Represents a synchronization point for OpenGL operations. It can be used to wait until the GPU + * has reached the specified point in the sequence of commands it is executing. This can be + * necessary when working with multiple GL contexts. + */ +final class GraphGlSyncToken implements GlSyncToken { + private long token; + + @Override + public void waitOnCpu() { + if (token != 0) { + nativeWaitOnCpu(token); + } + } + + @Override + public void waitOnGpu() { + if (token != 0) { + nativeWaitOnGpu(token); + } + } + + @Override + public void release() { + if (token != 0) { + nativeRelease(token); + token = 0; + } + } + + GraphGlSyncToken(long token) { + this.token = token; + } + + private static native void nativeWaitOnCpu(long token); + + private static native void nativeWaitOnGpu(long token); + + private static native void nativeRelease(long token); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphProfiler.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphProfiler.java new file mode 100644 index 000000000..8d4016eb8 --- /dev/null +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphProfiler.java @@ -0,0 +1,97 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import com.google.common.base.Preconditions; +import com.google.mediapipe.proto.CalculatorProfileProto.CalculatorProfile; +import com.google.protobuf.InvalidProtocolBufferException; +import java.util.ArrayList; +import java.util.List; + +/** MediaPipe Profiler Java API. */ +public class GraphProfiler { + private final long nativeProfilerHandle; + private final Graph mediapipeGraph; + + GraphProfiler(long nativeProfilerHandle, Graph mediapipeGraph) { + Preconditions.checkState( + nativeProfilerHandle != 0, + "Invalid profiler, tearDown() might have been called already."); + this.nativeProfilerHandle = nativeProfilerHandle; + this.mediapipeGraph = mediapipeGraph; + } + + /** + * Resets all the calculator profilers in the graph. This only resets the information about + * Process() and does NOT affect information for Open() and Close() methods. + */ + public void reset() { + synchronized (mediapipeGraph) { + checkContext(); + nativeReset(nativeProfilerHandle); + } + } + + /** Resumes all the calculator profilers in the graph. No-op if already profiling. 
*/ + public void resume() { + synchronized (mediapipeGraph) { + checkContext(); + nativeResume(nativeProfilerHandle); + } + } + + /** Pauses all the calculator profilers in the graph. No-op if already paused. */ + public void pause() { + synchronized (mediapipeGraph) { + checkContext(); + nativePause(nativeProfilerHandle); + } + } + + /** + * Collects the runtime profile for Open(), Process(), and Close() of each calculator in the + * graph. May be called at any time after the graph has been initialized. + */ + public List<CalculatorProfile> getCalculatorProfiles() { + synchronized (mediapipeGraph) { + checkContext(); + byte[][] profileBytes = nativeGetCalculatorProfiles(nativeProfilerHandle); + List<CalculatorProfile> profileList = new ArrayList<>(); + for (byte[] element : profileBytes) { + try { + CalculatorProfile profile = CalculatorProfile.parseFrom(element); + profileList.add(profile); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); + } + } + return profileList; + } + } + + private void checkContext() { + Preconditions.checkState( + mediapipeGraph.getNativeHandle() != 0, + "Invalid context, tearDown() might have been called already."); + } + + private native void nativeReset(long profilingContextHandle); + + private native void nativeResume(long profilingContextHandle); + + private native void nativePause(long profilingContextHandle); + + private native byte[][] nativeGetCalculatorProfiles(long profilingContextHandle); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphService.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphService.java new file mode 100644 index 000000000..2efc0ab9b --- /dev/null +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphService.java @@ -0,0 +1,30 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +/** + * Implement this interface to wrap a native GraphService. + * + *

T should be the Java class wrapping the native service object. + */ +public interface GraphService<T> { + /** + * Provides the native service object corresponding to the provided Java object. This must be + * handled by calling mediapipe::android::GraphServiceHelper::SetServiceObject in native code, + * passing the provided context argument. We do it this way to minimize the number of trips + * through JNI and maintain more type safety in the native code. + */ + public void installServiceObject(long context, T object); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphTextureFrame.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphTextureFrame.java new file mode 100644 index 000000000..e289ee74e --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphTextureFrame.java @@ -0,0 +1,95 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +/** + * A {@link TextureFrame} that represents a texture produced by MediaPipe. + * + *

The consumer is typically your application, which should therefore call the {@link #release()} + * method. + */ +public class GraphTextureFrame implements TextureFrame { + private long nativeBufferHandle; + // We cache these to be able to get them without a JNI call. + private int textureName; + private int width; + private int height; + private long timestamp = Long.MIN_VALUE; + + GraphTextureFrame(long nativeHandle, long timestamp) { + nativeBufferHandle = nativeHandle; + // TODO: use a single JNI call to fill in all info + textureName = nativeGetTextureName(nativeBufferHandle); + width = nativeGetWidth(nativeBufferHandle); + height = nativeGetHeight(nativeBufferHandle); + this.timestamp = timestamp; + } + + /** Returns the name of the underlying OpenGL texture. */ + @Override + public int getTextureName() { + return textureName; + } + + /** Returns the width of the underlying OpenGL texture. */ + @Override + public int getWidth() { + return width; + } + + /** Returns the height of the underlying OpenGL texture. */ + @Override + public int getHeight() { + return height; + } + + @Override + public long getTimestamp() { + return timestamp; + } + + /** + * Releases a reference to the underlying buffer. + * + *

The consumer calls this when it is done using the texture. + */ + @Override + public void release() { + if (nativeBufferHandle != 0) { + nativeReleaseBuffer(nativeBufferHandle); + nativeBufferHandle = 0; + } + } + + /** + * Releases a reference to the underlying buffer. + * + *

This form of the method is called when the consumer is MediaPipe itself. This can occur if a + * packet coming out of the graph is sent back into an input stream. Since both the producer and + * the consumer use the same context, we do not need to do further synchronization. Note: we do + * not currently support GPU sync across multiple graphs. TODO: Application consumers + * currently cannot create a GlSyncToken, so they cannot call this method. + */ + @Override + public void release(GlSyncToken syncToken) { + syncToken.release(); + release(); + } + + private native void nativeReleaseBuffer(long nativeHandle); + private native int nativeGetTextureName(long nativeHandle); + private native int nativeGetWidth(long nativeHandle); + private native int nativeGetHeight(long nativeHandle); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeException.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeException.java new file mode 100644 index 000000000..900585770 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeException.java @@ -0,0 +1,76 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +// Package java.nio.charset is not yet available in all Android apps. +import static com.google.common.base.Charsets.UTF_8; + +/** This class represents an error reported by the MediaPipe framework. */ +public class MediaPipeException extends RuntimeException { + public MediaPipeException(int statusCode, String statusMessage) { + super(StatusCode.values()[statusCode].description() + ": " + statusMessage); + this.statusCode = StatusCode.values()[statusCode]; + this.statusMessage = statusMessage; + } + + // Package base.Charsets is deprecated but package java.nio.charset is not + // yet available in all Android apps. + @SuppressWarnings("deprecation") + MediaPipeException(int code, byte[] message) { + this(code, new String(message, UTF_8)); + } + + public StatusCode getStatusCode() { + return statusCode; + } + + public String getStatusMessage() { + return statusMessage; + } + + /** The 17 canonical status codes. 
*/ + public enum StatusCode { + OK("ok"), + CANCELLED("canceled"), + UNKNOWN("unknown"), + INVALID_ARGUMENT("invalid argument"), + DEADLINE_EXCEEDED("deadline exceeded"), + NOT_FOUND("not found"), + ALREADY_EXISTS("already exists"), + PERMISSION_DENIED("permission denied"), + RESOURCE_EXHAUSTED("resource exhausted"), + FAILED_PRECONDITION("failed precondition"), + ABORTED("aborted"), + OUT_OF_RANGE("out of range"), + UNIMPLEMENTED("unimplemented"), + INTERNAL("internal"), + UNAVAILABLE("unavailable"), + DATA_LOSS("data loss"), + UNAUTHENTICATED("unauthenticated"); + + StatusCode(String description) { + this.description = description; + } + + public String description() { + return description; + } + + private final String description; + }; + + private final StatusCode statusCode; + private final String statusMessage; +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeRunner.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeRunner.java new file mode 100644 index 000000000..39a527372 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeRunner.java @@ -0,0 +1,53 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import android.content.Context; + +/** {@link MediaPipeRunner} is an abstract class for running MediaPipe graph in Android. */ +public abstract class MediaPipeRunner extends Graph { + protected Context context; + + public MediaPipeRunner(Context context) { + // Creates a singleton AssetCache. + AssetCache.create(context); + this.context = context; + } + + public void loadBinaryGraphFromAsset(String assetPath) { + try { + this.loadBinaryGraph(AssetCache.getAssetCache().getAbsolutePathFromAsset(assetPath)); + } catch (MediaPipeException e) { + // TODO: Report this error from MediaPipe. + } + } + + /** + * Starts running the graph. + */ + public abstract void start(); + /** + * Pauses a running graph. + */ + public abstract void pause(); + /** + * Resumes a paused graph. + */ + public abstract void resume(); + /** + * Stops the running graph and releases the resource. Call this in Activity onDestroy callback. + */ + public abstract void release(); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Packet.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Packet.java new file mode 100644 index 000000000..f34573d0a --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Packet.java @@ -0,0 +1,85 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +/** + * Java wrapper class for a native MediaPipe Packet. + * + *

To interpret the content of the packet, use {@link PacketGetter}. To create content of a + * packet, use {@link PacketCreator}. Java Packet should be released manually when no longer needed. + * + *

{@link Packet} can also be managed by {@link Graph}, which automatically releases all the + * packets in the context, however, we still need to be careful of the memory, and release them as + * soon as not needed. + */ +public class Packet { + // Points to a native Packet. + private long nativePacketHandle; + + /** + * Creates a Java packet from a native mediapipe packet handle. + * + * @return A Packet from a native internal::PacketWithContext handle. + */ + public static Packet create(long nativeHandle) { + return new Packet(nativeHandle); + } + + /** + * @return The native handle of the packet. + */ + public long getNativeHandle() { + return nativePacketHandle; + } + + /** @return The timestamp of the Packet. */ + public long getTimestamp() { + return nativeGetTimestamp(nativePacketHandle); + } + + /** + * @return a shared copy of the Packet. + *

This is essentially increasing the reference count to the data encapsulated in the + * native mediapipe packet. + */ + public Packet copy() { + return new Packet(nativeCopyPacket(nativePacketHandle)); + } + + /** + * Releases the native allocation of the packet. + * + *

After the Graph for this packet is torn down, calling this will cause unexpected behavior. + * Since Graph tearDown will release all native memories of the Packets it holds. + */ + public void release() { + if (nativePacketHandle != 0) { + nativeReleasePacket(nativePacketHandle); + nativePacketHandle = 0; + } + } + + // Packet is not intended to be constructed directly. + private Packet(long handle) { + nativePacketHandle = handle; + } + + // Releases the native memory. + private native void nativeReleasePacket(long packetHandle); + + private native long nativeCopyPacket(long packetHandle); + + private native long nativeGetTimestamp(long packetHandle); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCallback.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCallback.java new file mode 100644 index 000000000..ac8eef874 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCallback.java @@ -0,0 +1,20 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.mediapipe.framework; + +/** Interface for MediaPipe callback with packet. */ +public interface PacketCallback { + public void process(Packet packet); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCreator.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCreator.java new file mode 100644 index 000000000..9d97504f4 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCreator.java @@ -0,0 +1,308 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import com.google.protobuf.MessageLite; +import java.nio.ByteBuffer; + +// TODO: use Preconditions in this file. +/** + * Creates {@link Packet} in the given {@link Graph}. + * + *

This class provides a set of functions to create basic mediapipe packet types. + */ +public class PacketCreator { + protected Graph mediapipeGraph; + + public PacketCreator(Graph context) { + mediapipeGraph = context; + } + + /** + * Create a MediaPipe Packet that contains a pointer to another MediaPipe packet. + * + *

This can be used as a way to update the value of a packet. Similar to a mutable packet using + * mediapipe::AdoptAsUniquePtr. + * + *

The parameter {@code packet} can be released after this call, since the new packet already + * holds a reference to it in the native object. + */ + public Packet createReferencePacket(Packet packet) { + return Packet.create( + nativeCreateReferencePacket(mediapipeGraph.getNativeHandle(), packet.getNativeHandle())); + } + + /** + * Creates a 3 channel RGB ImageFrame packet from an RGB buffer. + * + *

Use {@link ByteBuffer#allocateDirect} when allocating the buffer. The pixel rows should have + * 4-byte alignment. + */ + public Packet createRgbImage(ByteBuffer buffer, int width, int height) { + int widthStep = (((width * 3) + 3) / 4) * 4; + if (widthStep * height != buffer.capacity()) { + throw new RuntimeException("The size of the buffer should be: " + widthStep * height); + } + return Packet.create( + nativeCreateRgbImage(mediapipeGraph.getNativeHandle(), buffer, width, height)); + } + + /** + * Create a MediaPipe audio packet that is used by most of the audio calculators. + * + * @param data the raw audio data, bytes per sample is 2. + * @param numChannels number of channels in the raw data. + * @param numSamples number of samples in the data. + */ + public Packet createAudioPacket(byte[] data, int numChannels, int numSamples) { + if (numChannels * numSamples * 2 != data.length) { + throw new RuntimeException("Data doesn't have the correct size."); + } + return Packet.create( + nativeCreateAudioPacket(mediapipeGraph.getNativeHandle(), data, numChannels, numSamples)); + } + + /** + * Creates a 3 channel RGB ImageFrame packet from an RGBA buffer. + * + *

Use {@link ByteBuffer#allocateDirect} when allocating the buffer. + */ + public Packet createRgbImageFromRgba(ByteBuffer buffer, int width, int height) { + if (width * height * 4 != buffer.capacity()) { + throw new RuntimeException("The size of the buffer should be: " + width * height * 4); + } + return Packet.create( + nativeCreateRgbImageFromRgba(mediapipeGraph.getNativeHandle(), buffer, width, height)); + } + + /** + * Creates a 1 channel ImageFrame packet from an U8 buffer. + * + *

Use {@link ByteBuffer#allocateDirect} when allocating the buffer. + */ + public Packet createGrayscaleImage(ByteBuffer buffer, int width, int height) { + if (width * height != buffer.capacity()) { + throw new RuntimeException( + "The size of the buffer should be: " + width * height + " but is " + buffer.capacity()); + } + return Packet.create( + nativeCreateGrayscaleImage(mediapipeGraph.getNativeHandle(), buffer, width, height)); + } + + /** + * Creates a 4 channel RGBA ImageFrame packet from an RGBA buffer. + * + *

Use {@link ByteBuffer#allocateDirect} when allocating the buffer. + */ + public Packet createRgbaImageFrame(ByteBuffer buffer, int width, int height) { + if (buffer.capacity() != width * height * 4) { + throw new RuntimeException("buffer doesn't have the correct size."); + } + return Packet.create( + nativeCreateRgbaImageFrame(mediapipeGraph.getNativeHandle(), buffer, width, height)); + } + + public Packet createInt16(short value) { + return Packet.create(nativeCreateInt16(mediapipeGraph.getNativeHandle(), value)); + } + + public Packet createInt32(int value) { + return Packet.create(nativeCreateInt32(mediapipeGraph.getNativeHandle(), value)); + } + + public Packet createInt64(long value) { + return Packet.create(nativeCreateInt64(mediapipeGraph.getNativeHandle(), value)); + } + + public Packet createFloat32(float value) { + return Packet.create(nativeCreateFloat32(mediapipeGraph.getNativeHandle(), value)); + } + + public Packet createFloat64(double value) { + return Packet.create(nativeCreateFloat64(mediapipeGraph.getNativeHandle(), value)); + } + + public Packet createBool(boolean value) { + return Packet.create(nativeCreateBool(mediapipeGraph.getNativeHandle(), value)); + } + + public Packet createString(String value) { + return Packet.create(nativeCreateString(mediapipeGraph.getNativeHandle(), value)); + } + + public Packet createInt16Vector(short[] data) { + throw new UnsupportedOperationException("Not implemented yet"); + } + + public Packet createInt32Vector(int[] data) { + throw new UnsupportedOperationException("Not implemented yet"); + } + + public Packet createInt64Vector(long[] data) { + throw new UnsupportedOperationException("Not implemented yet"); + } + + public Packet createFloat32Vector(float[] data) { + throw new UnsupportedOperationException("Not implemented yet"); + } + + public Packet createFloat64Vector(double[] data) { + throw new UnsupportedOperationException("Not implemented yet"); + } + + public Packet createInt32Array(int[] data) { + 
return Packet.create(nativeCreateInt32Array(mediapipeGraph.getNativeHandle(), data)); + } + + public Packet createFloat32Array(float[] data) { + return Packet.create(nativeCreateFloat32Array(mediapipeGraph.getNativeHandle(), data)); + } + + public Packet createByteArray(byte[] data) { + return Packet.create(nativeCreateStringFromByteArray(mediapipeGraph.getNativeHandle(), data)); + } + + /** + * Creates a VideoHeader to be used by the calculator that requires it. + * + *

Note: we are not populating frame rate and duration. If the calculator needs those values, + * the calculator is not suitable here. Modify the calculator to not require those values to work. + */ + public Packet createVideoHeader(int width, int height) { + return Packet.create(nativeCreateVideoHeader(mediapipeGraph.getNativeHandle(), width, height)); + } + + /** + * Creates a mediapipe::TimeSeriesHeader, which is used by many audio related calculators. + * + * @param numChannels number of audio channels. + * @param sampleRate sampling rate in Hertz. + */ + public Packet createTimeSeriesHeader(int numChannels, double sampleRate) { + return Packet.create( + nativeCreateTimeSeriesHeader(mediapipeGraph.getNativeHandle(), numChannels, sampleRate)); + } + + public Packet createMatrix(int rows, int cols, float[] data) { + return Packet.create(nativeCreateMatrix(mediapipeGraph.getNativeHandle(), rows, cols, data)); + } + + /** Creates a {@link Packet} containing the serialized proto string. */ + public Packet createSerializedProto(MessageLite message) { + return Packet.create( + nativeCreateStringFromByteArray(mediapipeGraph.getNativeHandle(), message.toByteArray())); + } + + /** Creates a {@link Packet} containing a {@code CalculatorOptions} proto message. */ + public Packet createCalculatorOptions(MessageLite message) { + return Packet.create( + nativeCreateCalculatorOptions(mediapipeGraph.getNativeHandle(), message.toByteArray())); + } + + /** Creates a {@link Packet} containing the given camera intrinsics. */ + public Packet createCameraIntrinsics( + float fx, float fy, float cx, float cy, float width, float height) { + return Packet.create( + nativeCreateCameraIntrinsics( + mediapipeGraph.getNativeHandle(), fx, fy, cx, cy, width, height)); + } + + /** + * Creates a mediapipe::GpuBuffer with the specified texture name and dimensions. + * + * @param name the OpenGL texture name. + * @param width the width in pixels. + * @param height the height in pixels. 
+ * @param releaseCallback a callback to be invoked when the mediapipe::GpuBuffer is released. Can be + * null. + */ + public Packet createGpuBuffer( + int name, int width, int height, TextureReleaseCallback releaseCallback) { + return Packet.create( + nativeCreateGpuBuffer( + mediapipeGraph.getNativeHandle(), name, width, height, releaseCallback)); + } + + /** + * Creates a mediapipe::GpuBuffer with the specified texture name and dimensions. + * + * @param name the OpenGL texture name. + * @param width the width in pixels. + * @param height the height in pixels. + * @deprecated use {@link #createGpuBuffer(int,int,int,TextureReleaseCallback)} instead. + */ + @Deprecated + public Packet createGpuBuffer(int name, int width, int height) { + return Packet.create( + nativeCreateGpuBuffer(mediapipeGraph.getNativeHandle(), name, width, height, null)); + } + + /** + * Creates a mediapipe::GpuBuffer with the provided {@link TextureFrame}. + * + *

Note: in order for MediaPipe to be able to access the texture, the application's GL context + * must be linked with MediaPipe's. This is ensured by calling {@link + * Graph#createGlRunner(String,long)} with the native handle to the application's GL context as + * the second argument. + */ + public Packet createGpuBuffer(TextureFrame frame) { + return Packet.create( + nativeCreateGpuBuffer( + mediapipeGraph.getNativeHandle(), + frame.getTextureName(), + frame.getWidth(), + frame.getHeight(), + frame)); + } + + /** Helper callback adaptor to create the Java {@link GlSyncToken}. This is called by JNI code. */ + private void releaseWithSyncToken(long nativeSyncToken, TextureReleaseCallback releaseCallback) { + releaseCallback.release(new GraphGlSyncToken(nativeSyncToken)); + } + + private native long nativeCreateReferencePacket(long context, long packet); + private native long nativeCreateRgbImage(long context, ByteBuffer buffer, int width, int height); + private native long nativeCreateAudioPacket( + long context, byte[] data, int numChannels, int numSamples); + private native long nativeCreateRgbImageFromRgba( + long context, ByteBuffer buffer, int width, int height); + + private native long nativeCreateGrayscaleImage( + long context, ByteBuffer buffer, int width, int height); + + private native long nativeCreateRgbaImageFrame( + long context, ByteBuffer buffer, int width, int height); + private native long nativeCreateInt16(long context, short value); + private native long nativeCreateInt32(long context, int value); + private native long nativeCreateInt64(long context, long value); + private native long nativeCreateFloat32(long context, float value); + private native long nativeCreateFloat64(long context, double value); + private native long nativeCreateBool(long context, boolean value); + private native long nativeCreateString(long context, String value); + private native long nativeCreateVideoHeader(long context, int width, int height); + private native long 
nativeCreateTimeSeriesHeader( + long context, int numChannels, double sampleRate); + private native long nativeCreateMatrix(long context, int rows, int cols, float[] data); + private native long nativeCreateGpuBuffer( + long context, int name, int width, int height, TextureReleaseCallback releaseCallback); + private native long nativeCreateInt32Array(long context, int[] data); + private native long nativeCreateFloat32Array(long context, float[] data); + private native long nativeCreateStringFromByteArray(long context, byte[] data); + + private native long nativeCreateCalculatorOptions(long context, byte[] data); + + private native long nativeCreateCameraIntrinsics( + long context, float fx, float fy, float cx, float cy, float width, float height); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketGetter.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketGetter.java new file mode 100644 index 000000000..a1a05b175 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketGetter.java @@ -0,0 +1,303 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import com.google.common.flogger.FluentLogger; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +/** + * Converts the {@link Packet} to java accessible data types. + * + *

{@link Packet} is a thin java wrapper for the native MediaPipe packet. This class provides the + * extendable conversion needed to access the data in the packet. + * + *

Note that it is still the developer's responsibility to interpret the data correctly. + */ +public final class PacketGetter { + private static final FluentLogger logger = FluentLogger.forEnclosingClass(); + + /** Helper class for a list of exactly two Packets. */ + public static class PacketPair { + public PacketPair(Packet first, Packet second) { + this.first = first; + this.second = second; + } + + final Packet first; + final Packet second; + } + + /** + * Returns the {@link Packet} that held in the reference packet. + * + *

Note: release the returned packet after use. + */ + public static Packet getPacketFromReference(final Packet referencePacket) { + return Packet.create(nativeGetPacketFromReference(referencePacket.getNativeHandle())); + } + + /** + * The {@link Packet} contains a pair of packets, return both of them. + * + *

Note: release the packets in the pair after use. + * + * @param packet A MediaPipe packet that contains a pair of packets. + */ + public static PacketPair getPairOfPackets(final Packet packet) { + long[] handles = nativeGetPairPackets(packet.getNativeHandle()); + return new PacketPair(Packet.create(handles[0]), Packet.create(handles[1])); + } + + /** + * Returns a list of packets that are contained in The {@link Packet}. + * + *

Note: release the packets in the list after use. + * + * @param packet A MediaPipe packet that contains a vector of packets. + */ + public static List getVectorOfPackets(final Packet packet) { + long[] handles = nativeGetVectorPackets(packet.getNativeHandle()); + List packets = new ArrayList<>(handles.length); + for (long handle : handles) { + packets.add(Packet.create(handle)); + } + return packets; + } + + public static short getInt16(final Packet packet) { + return nativeGetInt16(packet.getNativeHandle()); + } + + public static int getInt32(final Packet packet) { + return nativeGetInt32(packet.getNativeHandle()); + } + + public static long getInt64(final Packet packet) { + return nativeGetInt64(packet.getNativeHandle()); + } + + public static float getFloat32(final Packet packet) { + return nativeGetFloat32(packet.getNativeHandle()); + } + + public static double getFloat64(final Packet packet) { + return nativeGetFloat64(packet.getNativeHandle()); + } + + public static boolean getBool(final Packet packet) { + return nativeGetBool(packet.getNativeHandle()); + } + + public static String getString(final Packet packet) { + return nativeGetString(packet.getNativeHandle()); + } + + public static byte[] getBytes(final Packet packet) { + return nativeGetBytes(packet.getNativeHandle()); + } + + public static byte[] getProtoBytes(final Packet packet) { + return nativeGetProtoBytes(packet.getNativeHandle()); + } + + public static short[] getInt16Vector(final Packet packet) { + return nativeGetInt16Vector(packet.getNativeHandle()); + } + + public static int[] getInt32Vector(final Packet packet) { + return nativeGetInt32Vector(packet.getNativeHandle()); + } + + public static long[] getInt64Vector(final Packet packet) { + return nativeGetInt64Vector(packet.getNativeHandle()); + } + + public static float[] getFloat32Vector(final Packet packet) { + return nativeGetFloat32Vector(packet.getNativeHandle()); + } + + public static double[] getFloat64Vector(final Packet packet) { + 
return nativeGetFloat64Vector(packet.getNativeHandle()); + } + + public static int getImageWidth(final Packet packet) { + return nativeGetImageWidth(packet.getNativeHandle()); + } + + public static int getImageHeight(final Packet packet) { + return nativeGetImageHeight(packet.getNativeHandle()); + } + + /** + * Returns the native image buffer in ByteBuffer. It assumes the output buffer stores pixels + * contiguously. It returns false if this assumption does not hold. + * + *

Note: this function does not assume the pixel format. + * + *

Use {@link ByteBuffer#allocateDirect} when allocating the buffer. + */ + public static boolean getImageData(final Packet packet, ByteBuffer buffer) { + return nativeGetImageData(packet.getNativeHandle(), buffer); + } + + /** + * Converts an RGB mediapipe image frame packet to an RGBA Byte buffer. + * + *

Use {@link ByteBuffer#allocateDirect} when allocating the buffer. + */ + public static boolean getRgbaFromRgb(final Packet packet, ByteBuffer buffer) { + return nativeGetRgbaFromRgb(packet.getNativeHandle(), buffer); + } + + /** + * Converts the audio matrix data back into byte data. + * + *

The matrix is in column major order. + */ + public static byte[] getAudioByteData(final Packet packet) { + return nativeGetAudioData(packet.getNativeHandle()); + } + + /** + * Audio data is in MediaPipe Matrix format. + * + * @return the number of channels in the data. + */ + public static int getAudioDataNumChannels(final Packet packet) { + return nativeGetMatrixRows(packet.getNativeHandle()); + } + + /** + * Audio data is in MediaPipe Matrix format. + * + * @return the number of samples in the data. + */ + public static int getAudioDataNumSamples(final Packet packet) { + return nativeGetMatrixCols(packet.getNativeHandle()); + } + + /** + * In addition to the data packet, mediapipe currently also has a separate audio header: {@code + * mediapipe::TimeSeriesHeader}. + * + * @return the number of channel in the header packet. + */ + public static int getTimeSeriesHeaderNumChannels(final Packet packet) { + return nativeGetTimeSeriesHeaderNumChannels(packet.getNativeHandle()); + } + + /** + * In addition to the data packet, mediapipe currently also has a separate audio header: {@code + * mediapipe::TimeSeriesHeader}. + * + * @return the sampling rate in the header packet. + */ + public static double getTimeSeriesHeaderSampleRate(final Packet packet) { + return nativeGetTimeSeriesHeaderSampleRate(packet.getNativeHandle()); + } + + /** Gets the width in video header packet. */ + public static int getVideoHeaderWidth(final Packet packet) { + return nativeGetVideoHeaderWidth(packet.getNativeHandle()); + } + + /** Gets the height in video header packet. */ + public static int getVideoHeaderHeight(final Packet packet) { + return nativeGetVideoHeaderHeight(packet.getNativeHandle()); + } + + /** + * Returns the float array data of the mediapipe Matrix. + * + *

Underlying packet stores the matrix as {@code ::mediapipe::Matrix}. + */ + public static float[] getMatrixData(final Packet packet) { + return nativeGetMatrixData(packet.getNativeHandle()); + } + + public static int getMatrixRows(final Packet packet) { + return nativeGetMatrixRows(packet.getNativeHandle()); + } + + public static int getMatrixCols(final Packet packet) { + return nativeGetMatrixCols(packet.getNativeHandle()); + } + + /** + * Returns the GL texture name of the mediapipe::GpuBuffer. + * + * @deprecated use {@link #getTextureFrame} instead. + */ + @Deprecated + public static int getGpuBufferName(final Packet packet) { + return nativeGetGpuBufferName(packet.getNativeHandle()); + } + + /** + * Returns a {@link GraphTextureFrame} referencing a C++ mediapipe::GpuBuffer. + * + *

Note: in order for the application to be able to use the texture, its GL context must be + * linked with MediaPipe's. This is ensured by calling {@link Graph#createGlRunner(String,long)} + * with the native handle to the application's GL context as the second argument. + */ + public static GraphTextureFrame getTextureFrame(final Packet packet) { + return new GraphTextureFrame( + nativeGetGpuBuffer(packet.getNativeHandle()), packet.getTimestamp()); + } + + private static native long nativeGetPacketFromReference(long nativePacketHandle); + private static native long[] nativeGetPairPackets(long nativePacketHandle); + private static native long[] nativeGetVectorPackets(long nativePacketHandle); + + private static native short nativeGetInt16(long nativePacketHandle); + private static native int nativeGetInt32(long nativePacketHandle); + private static native long nativeGetInt64(long nativePacketHandle); + private static native float nativeGetFloat32(long nativePacketHandle); + private static native double nativeGetFloat64(long nativePacketHandle); + private static native boolean nativeGetBool(long nativePacketHandle); + private static native String nativeGetString(long nativePacketHandle); + private static native byte[] nativeGetBytes(long nativePacketHandle); + private static native byte[] nativeGetProtoBytes(long nativePacketHandle); + private static native short[] nativeGetInt16Vector(long nativePacketHandle); + private static native int[] nativeGetInt32Vector(long nativePacketHandle); + private static native long[] nativeGetInt64Vector(long nativePacketHandle); + private static native float[] nativeGetFloat32Vector(long nativePacketHandle); + private static native double[] nativeGetFloat64Vector(long nativePacketHandle); + private static native int nativeGetImageWidth(long nativePacketHandle); + private static native int nativeGetImageHeight(long nativePacketHandle); + private static native boolean nativeGetImageData(long nativePacketHandle, ByteBuffer buffer); + 
private static native boolean nativeGetRgbaFromRgb(long nativePacketHandle, ByteBuffer buffer); + // Retrieves the values that are in the VideoHeader. + private static native int nativeGetVideoHeaderWidth(long nativepackethandle); + private static native int nativeGetVideoHeaderHeight(long nativepackethandle); + // Retrieves the values that are in the mediapipe::TimeSeriesHeader. + private static native int nativeGetTimeSeriesHeaderNumChannels(long nativepackethandle); + + private static native double nativeGetTimeSeriesHeaderSampleRate(long nativepackethandle); + + // Audio data in MediaPipe current uses MediaPipe Matrix format type. + private static native byte[] nativeGetAudioData(long nativePacketHandle); + // Native helper functions to access the MediaPipe Matrix data. + private static native float[] nativeGetMatrixData(long nativePacketHandle); + + private static native int nativeGetMatrixRows(long nativePacketHandle); + private static native int nativeGetMatrixCols(long nativePacketHandle); + private static native int nativeGetGpuBufferName(long nativePacketHandle); + private static native long nativeGetGpuBuffer(long nativePacketHandle); + + private PacketGetter() {} +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketWithHeaderCallback.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketWithHeaderCallback.java new file mode 100644 index 000000000..9803254f9 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketWithHeaderCallback.java @@ -0,0 +1,20 @@ +// Copyright 2019 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +/** Interface for MediaPipe callback with packet and packet header. */ +public interface PacketWithHeaderCallback { + public void process(Packet packet, Packet packetHeader); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/SurfaceOutput.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/SurfaceOutput.java new file mode 100644 index 000000000..454ff2c2c --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/SurfaceOutput.java @@ -0,0 +1,79 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +import javax.annotation.Nullable; + +/** + * Outputs a MediaPipe video stream to an {@link android.opengl.EGLSurface}. + * + *

Should be created using {@link Graph#addEglSurfaceOutput}. + */ +public class SurfaceOutput { + private Packet surfaceHolderPacket; + private Graph mediapipeGraph; + + SurfaceOutput(Graph context, Packet holderPacket) { + mediapipeGraph = context; + surfaceHolderPacket = holderPacket; + } + + /** + * Sets vertical flipping of the output surface, useful for conversion between coordinate systems + * with top-left v.s. bottom-left origins. This should be called before {@link + * #setSurface(Object)} or {@link #setEglSurface(long)}. + */ + public void setFlipY(boolean flip) { + nativeSetFlipY(surfaceHolderPacket.getNativeHandle(), flip); + } + + /** + * Connects an Android {@link Surface} to an output. + * + *

This creates the requisite {@link EGLSurface} internally. If one has already been created + * for this Surface outside of MediaPipe, the call will fail. + * + *

Note that a given Surface can only be connected to one output. If you wish to move it to a + * different output, first call {@code setSurface(null)} on the old output. + * + * @param surface The surface to connect. Can be {@code null}. + */ + public void setSurface(@Nullable Object surface) { + nativeSetSurface( + mediapipeGraph.getNativeHandle(), surfaceHolderPacket.getNativeHandle(), surface); + } + + /** + * Connects an EGL surface to an output. + * + *

NOTE: The surface needs to be compatible with the GL context used by MediaPipe. In practice + * this means the EGL context that created the surface should use the same config as used by the + * MediaPipe GL context, otherwise the surface sink calculator will fail with {@code + * EGL_BAD_MATCH}. + * + * @param nativeEglSurface Native handle to the egl surface. + */ + public void setEglSurface(long nativeEglSurface) { + nativeSetEglSurface( + mediapipeGraph.getNativeHandle(), surfaceHolderPacket.getNativeHandle(), nativeEglSurface); + } + + private native void nativeSetFlipY(long nativePacket, boolean flip); + + private native void nativeSetSurface( + long nativeContext, long nativePacket, Object surface); + private native void nativeSetEglSurface( + long nativeContext, long nativePacket, long nativeEglSurface); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureFrame.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureFrame.java new file mode 100644 index 000000000..babfd2958 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureFrame.java @@ -0,0 +1,62 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +/** + * Interface for a video frame that can be accessed as a texture. + * + *

This interface defines a producer/consumer relationship between the component that originates + * the TextureFrame and the component that receives it. The consumer must call {@link + * #release()} when it is done using the frame. This gives the producer the opportunity to recycle + * the resource. + * + *

When your application sends a TextureFrame into a MediaPipe graph, the application is the + * producer and MediaPipe is the consumer. MediaPipe will call the release() method when all copies + * of the packet holding the texture have been destroyed. + * + *

When MediaPipe sends a TextureFrame to the application, MediaPipe is the producer and the + * application is the consumer. The application should call the release() method. + * + *

You can also send a TextureFrame from a component of your application to another. In this + * case, the receiving component is the consumer, and should call release(). This can be useful, for + * instance, if your application requires a "raw" mode where frames are sent directly from the video + * source to the renderer, bypassing MediaPipe. + */ +public interface TextureFrame extends TextureReleaseCallback { + /** The OpenGL name of the texture. */ + int getTextureName(); + + /** Width of the frame in pixels. */ + int getWidth(); + + /** Height of the frame in pixels. */ + int getHeight(); + + /** The presentation time of the frame in microseconds **/ + long getTimestamp(); + + /** + * The consumer that receives this TextureFrame must call this method to inform the provider that + * it is done with it. + */ + void release(); + + /** + * If this texture is provided to MediaPipe, this method will be called when it is released. The + * {@link GlSyncToken} can be used to wait for the GPU to be entirely done reading the texture. + */ + @Override + void release(GlSyncToken syncToken); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureReleaseCallback.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureReleaseCallback.java new file mode 100644 index 000000000..e84d89358 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureReleaseCallback.java @@ -0,0 +1,27 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.framework; + +/** + * A callback that gets invoked when a texture is no longer in use. + */ +public interface TextureReleaseCallback { + /** + * Called when the texture has been released. The sync token can be used to ensure that the GPU is + * done reading from it. Implementations of this interface should release the token once they are + * done with it. + */ + void release(GlSyncToken syncToken); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/BUILD new file mode 100644 index 000000000..182226cbb --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/BUILD @@ -0,0 +1,143 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +licenses(["notice"]) # Apache 2.0 + +package( + default_visibility = ["//visibility:public"], + features = ["no_layering_check"], +) + +alias( + name = "mediapipe_android_framework_jni", + actual = ":mediapipe_framework_jni", +) + +cc_library( + name = "mediapipe_framework_jni", + srcs = [ + "compat_jni.cc", + "graph.cc", + "graph_jni.cc", + "graph_service_jni.cc", + "packet_context_jni.cc", + "packet_creator_jni.cc", + "packet_getter_jni.cc", + "graph_profiler_jni.cc", + ] + select({ + "//conditions:default": [], + "//mediapipe:android": [ + "android_asset_util_jni.cc", + "android_packet_creator_jni.cc", + ], + }) + select({ + "//conditions:default": [ + "graph_gl_sync_token.cc", + "graph_texture_frame_jni.cc", + "surface_output_jni.cc", + ], + "//mediapipe/gpu:disable_gpu": [], + }), + hdrs = [ + "colorspace.h", + "compat_jni.h", + "graph.h", + "graph_jni.h", + "graph_service_jni.h", + "packet_context_jni.h", + "packet_creator_jni.h", + "packet_getter_jni.h", + "graph_profiler_jni.h", + ] + select({ + "//conditions:default": [], + "//mediapipe:android": [ + "android_asset_util_jni.h", + "android_packet_creator_jni.h", + ], + }) + select({ + "//conditions:default": [ + "graph_gl_sync_token.h", + "graph_texture_frame_jni.h", + "surface_output_jni.h", + ], + "//mediapipe/gpu:disable_gpu": [], + }), + linkopts = select({ + "//conditions:default": [], + "//mediapipe:android": [ + "-ljnigraphics", + "-lEGL", # This is needed by compat_jni even if GPU is disabled. 
+ ], + }), + visibility = ["//visibility:public"], + deps = [ + ":jni_util", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_profile_cc_proto", + "//mediapipe/framework/tool:calculator_graph_template_cc_proto", + "//mediapipe/framework/formats:image_format_cc_proto", + "//mediapipe/framework/formats:matrix_data_cc_proto", + "//mediapipe/framework/formats:time_series_header_cc_proto", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/synchronization", + "@eigen_archive//:eigen", + "//mediapipe/framework:camera_intrinsics", + "//mediapipe/framework/formats:image_frame", + "//mediapipe/framework/formats:matrix", + "//mediapipe/framework/formats:video_stream_header", + "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler", + "//mediapipe/framework/tool:name_util", + "//mediapipe/framework/tool:executor_util", + "//mediapipe/framework/port:core_proto", + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:threadpool", + "//mediapipe/framework/port:singleton", + "//mediapipe/framework/port:status", + ] + select({ + "//conditions:default": [ + "//mediapipe/framework/port:file_helpers", + ], + "//mediapipe:android": [ + "//mediapipe/util/android/file/base", + "//mediapipe/util/android:asset_manager_util", + ], + }) + select({ + "//conditions:default": [ + "//mediapipe/gpu:gl_quad_renderer", + "//mediapipe/gpu:gl_calculator_helper", + "//mediapipe/gpu:gl_surface_sink_calculator", + "//mediapipe/gpu:gpu_shared_data_internal", + "//mediapipe/gpu:graph_support", + ], + "//mediapipe/gpu:disable_gpu": [ + "//mediapipe/gpu:gpu_shared_data_internal", + ], + }), + alwayslink = 1, +) + +cc_library( + name = "jni_util", + srcs = (["jni_util.cc"]), + hdrs = (["jni_util.h"]), + deps = [ + "@com_google_absl//absl/synchronization", + "//mediapipe/framework/port:logging", + ] + select({ + "//conditions:default": [ + ], + "//mediapipe:android": [ + ], + }), +) diff --git 
a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/METADATA b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/METADATA new file mode 100644 index 000000000..cbc57f510 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/METADATA @@ -0,0 +1,7 @@ +tricorder: { + options: { + builder: { + config: "android_arm" + } + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.cc new file mode 100644 index 000000000..71a36b665 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.cc @@ -0,0 +1,33 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "mediapipe/java/com/google/mediapipe/framework/jni/android_asset_util_jni.h" + +#include + +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/singleton.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" +#include "mediapipe/util/android/asset_manager_util.h" + +JNIEXPORT jboolean JNICALL ANDROID_ASSET_UTIL_METHOD( + nativeInitializeAssetManager)(JNIEnv* env, jclass clz, + jobject android_context, + jstring cache_dir_path) { + mediapipe::AssetManager* asset_manager = + Singleton::get(); + return asset_manager->InitializeFromActivity( + env, android_context, + mediapipe::android::JStringToStdString(env, cache_dir_path)); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.h new file mode 100644 index 000000000..c842433ff --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.h @@ -0,0 +1,36 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_ASSET_UTIL_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_ASSET_UTIL_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define ANDROID_ASSET_UTIL_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_AndroidAssetUtil_##METHOD_NAME + +JNIEXPORT jboolean JNICALL ANDROID_ASSET_UTIL_METHOD( + nativeInitializeAssetManager)(JNIEnv* env, jclass clz, + jobject android_context, + jstring cache_dir_path); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_ASSET_UTIL_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc new file mode 100644 index 000000000..2f71e649c --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc @@ -0,0 +1,117 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "mediapipe/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.h" + +#include + +#include +#include + +#include "absl/memory/memory.h" +#include "mediapipe/framework/formats/image_format.pb.h" +#include "mediapipe/framework/formats/image_frame.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h" + +namespace { + +// Creates a new internal::PacketWithContext object, and returns the native +// handle. +int64_t CreatePacketWithContext(jlong context, + const mediapipe::Packet& packet) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + return mediapipe_graph->WrapPacketIntoContext(packet); +} + +} // namespace + +JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD( + nativeCreateRgbImageFrame)(JNIEnv* env, jobject thiz, jlong context, + jobject bitmap) { + AndroidBitmapInfo info; + int result = AndroidBitmap_getInfo(env, bitmap, &info); + if (result != ANDROID_BITMAP_RESULT_SUCCESS) { + LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " << result; + return 0L; + } + if (info.stride != info.width * 4) { + LOG(ERROR) << "Bitmap stride: " << info.stride + << "is not equal to 4 times bitmap width: " << info.width; + return 0L; + } + auto image_frame = absl::make_unique<::mediapipe::ImageFrame>( + mediapipe::ImageFormat::SRGB, info.width, info.height, + ::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary); + void* pixel_addr = nullptr; + result = AndroidBitmap_lockPixels(env, bitmap, &pixel_addr); + if (result != ANDROID_BITMAP_RESULT_SUCCESS) { + LOG(ERROR) << "AndroidBitmap_lockPixels() failed with result code " + << result; + return 0L; + } + const uint8_t* rgba_data = static_cast(pixel_addr); + mediapipe::android::RgbaToRgb(rgba_data, info.stride, info.width, info.height, + image_frame->MutablePixelData(), + image_frame->WidthStep()); + result = 
AndroidBitmap_unlockPixels(env, bitmap); + if (result != ANDROID_BITMAP_RESULT_SUCCESS) { + LOG(ERROR) << "AndroidBitmap_unlockPixels() failed with result code " + << result; + return 0L; + } + mediapipe::Packet packet = mediapipe::Adopt(image_frame.release()); + return CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD( + nativeCreateRgbaImageFrame)(JNIEnv* env, jobject thiz, jlong context, + jobject bitmap) { + AndroidBitmapInfo info; + int result = AndroidBitmap_getInfo(env, bitmap, &info); + if (result != ANDROID_BITMAP_RESULT_SUCCESS) { + LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " << result; + return 0L; + } + auto image_frame = absl::make_unique<::mediapipe::ImageFrame>( + mediapipe::ImageFormat::SRGBA, info.width, info.height, + ::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary); + int64_t buffer_size = info.stride * info.height; + if (buffer_size != image_frame->PixelDataSize()) { + LOG(ERROR) << "Bitmap stride: " << info.stride + << " times bitmap height: " << info.height + << " is not equal to the expected size: " + << image_frame->PixelDataSize(); + return 0L; + } + void* pixel_addr = nullptr; + result = AndroidBitmap_lockPixels(env, bitmap, &pixel_addr); + if (result != ANDROID_BITMAP_RESULT_SUCCESS) { + LOG(ERROR) << "AndroidBitmap_lockPixels() failed with result code " + << result; + return 0L; + } + std::memcpy(image_frame->MutablePixelData(), pixel_addr, + image_frame->PixelDataSize()); + result = AndroidBitmap_unlockPixels(env, bitmap); + if (result != ANDROID_BITMAP_RESULT_SUCCESS) { + LOG(ERROR) << "AndroidBitmap_unlockPixels() failed with result code " + << result; + return 0L; + } + mediapipe::Packet packet = mediapipe::Adopt(image_frame.release()); + return CreatePacketWithContext(context, packet); +} diff --git 
a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.h new file mode 100644 index 000000000..a1fc587d9 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.h @@ -0,0 +1,39 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_PACKET_CREATOR_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_PACKET_CREATOR_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define ANDROID_PACKET_CREATOR_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_AndroidPacketCreator_##METHOD_NAME + +JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD( + nativeCreateRgbImageFrame)(JNIEnv* env, jobject thiz, jlong context, + jobject bitmap); + +JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD( + nativeCreateRgbaImageFrame)(JNIEnv* env, jobject thiz, jlong context, + jobject bitmap); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_PACKET_CREATOR_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/colorspace.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/colorspace.h new file mode 100644 index 000000000..f5ad09acd --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/colorspace.h @@ -0,0 +1,60 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COLORSPACE_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COLORSPACE_H_ + +#include + +namespace mediapipe { +namespace android { +// TODO: switch to more efficient implementation, like halide later. + +// Converts an RGBA image to RGB +inline void RgbaToRgb(const uint8_t* rgba_img, int rgba_width_step, int width, + int height, uint8_t* rgb_img, int rgb_width_step) { + for (int y = 0; y < height; ++y) { + const auto* rgba = rgba_img + y * rgba_width_step; + auto* rgb = rgb_img + y * rgb_width_step; + for (int x = 0; x < width; ++x) { + *rgb = *rgba; + *(rgb + 1) = *(rgba + 1); + *(rgb + 2) = *(rgba + 2); + rgb += 3; + rgba += 4; + } + } +} + +// Converts a RGB image to RGBA +inline void RgbToRgba(const uint8_t* rgb_img, int rgb_width_step, int width, + int height, uint8_t* rgba_img, int rgba_width_step, + uint8_t alpha) { + for (int y = 0; y < height; ++y) { + const auto* rgb = rgb_img + y * rgb_width_step; + auto* rgba = rgba_img + y * rgba_width_step; + for (int x = 0; x < width; ++x) { + *rgba = *rgb; + *(rgba + 1) = *(rgb + 1); + *(rgba + 2) = *(rgb + 2); + *(rgba + 3) = alpha; + rgb += 3; + rgba += 4; + } + } +} + +} // namespace android +} // namespace mediapipe +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COLORSPACE_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.cc new file mode 100644 index 000000000..894116848 --- /dev/null +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.cc @@ -0,0 +1,27 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/java/com/google/mediapipe/framework/jni/compat_jni.h" + +#include + +JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLContext)(JNIEnv* env, + jclass clz) { + return reinterpret_cast(eglGetCurrentContext()); +} + +JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLSurface)( + JNIEnv* env, jclass clz, jint readdraw) { + return reinterpret_cast(eglGetCurrentSurface(readdraw)); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.h new file mode 100644 index 000000000..2d12cc6cd --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.h @@ -0,0 +1,37 @@ +// Copyright 2019 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COMPAT_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COMPAT_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define COMPAT_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_Compat_##METHOD_NAME + +JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLContext)(JNIEnv* env, + jclass clz); + +JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLSurface)( + JNIEnv* env, jclass clz, jint readdraw); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COMPAT_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.cc new file mode 100644 index 000000000..e26123c1c --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.cc @@ -0,0 +1,600 @@ +// Copyright 2019 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h" + +#include + +#include + +#include "absl/strings/str_cat.h" +#include "absl/strings/str_format.h" +#include "absl/synchronization/mutex.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/port/canonical_errors.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/proto_ns.h" +#include "mediapipe/framework/port/status.h" +#include "mediapipe/framework/port/threadpool.h" +#include "mediapipe/framework/tool/executor_util.h" +#include "mediapipe/framework/tool/name_util.h" +#include "mediapipe/gpu/gpu_shared_data_internal.h" +#include "mediapipe/gpu/graph_support.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/packet_context_jni.h" +#ifdef __ANDROID__ +#include "mediapipe/util/android/file/base/helpers.h" +#else +#include "mediapipe/framework/port/file_helpers.h" +#endif // __ANDROID__ +#ifndef MEDIAPIPE_DISABLE_GPU +#include "mediapipe/gpu/egl_surface_holder.h" +#endif // !defined(MEDIAPIPE_DISABLE_GPU) + +namespace mediapipe { +namespace android { + +namespace internal { +// PacketWithContext is the native counterpart of the Java Packet. 
+class PacketWithContext { + public: + PacketWithContext(Graph* context, const Packet& packet) + : context_(context), packet_(packet) {} + + ~PacketWithContext() {} + + Graph* GetContext() { return context_; } + + Packet& packet() { return packet_; } + + private: + Graph* context_; + Packet packet_; +}; + +// A callback handler that wraps the java callback, and submits it for +// execution through Graph. +class CallbackHandler { + public: + CallbackHandler(Graph* context, jobject callback) + : context_(context), java_callback_(callback) {} + + ~CallbackHandler() { + // The jobject global reference is managed by the Graph directly. + // So no-op here. + if (java_callback_) { + LOG(ERROR) << "Java callback global reference is not released."; + } + } + + void PacketCallback(const Packet& packet) { + context_->CallbackToJava(mediapipe::java::GetJNIEnv(), java_callback_, + packet); + } + + void PacketWithHeaderCallback(const Packet& packet, const Packet& header) { + context_->CallbackToJava(mediapipe::java::GetJNIEnv(), java_callback_, + packet, header); + } + + std::function CreateCallback() { + return std::bind(&CallbackHandler::PacketCallback, this, + std::placeholders::_1); + } + + std::function CreateCallbackWithHeader() { + return std::bind(&CallbackHandler::PacketWithHeaderCallback, this, + std::placeholders::_1, std::placeholders::_2); + } + + // Releases the global reference to the java callback object. + // This is called by the Graph, since releasing of a jni object + // requires JNIEnv object that we can not keep a copy of. 
+ void ReleaseCallback(JNIEnv* env) { + env->DeleteGlobalRef(java_callback_); + java_callback_ = nullptr; + } + + private: + Graph* context_; + // java callback object + jobject java_callback_; +}; +} // namespace internal + +Graph::Graph() + : executor_stack_size_increased_(false), global_java_packet_cls_(nullptr) {} + +Graph::~Graph() { + if (running_graph_) { + running_graph_->Cancel(); + running_graph_->WaitUntilDone().IgnoreError(); + } + // Cleans up the jni objects. + JNIEnv* env = mediapipe::java::GetJNIEnv(); + if (env == nullptr) { + LOG(ERROR) << "Can't attach to java thread, no jni clean up performed."; + return; + } + for (const auto& handler : callback_handlers_) { + handler->ReleaseCallback(env); + } + if (global_java_packet_cls_) { + env->DeleteGlobalRef(global_java_packet_cls_); + global_java_packet_cls_ = nullptr; + } +} + +int64_t Graph::WrapPacketIntoContext(const Packet& packet) { + absl::MutexLock lock(&all_packets_mutex_); + auto packet_context = new internal::PacketWithContext(this, packet); + // Since the value of the all_packets_ map is a unique_ptr, resets it with the + // new allocated object. 
+ all_packets_[packet_context].reset(packet_context); + VLOG(2) << "Graph packet reference buffer size: " << all_packets_.size(); + return reinterpret_cast(packet_context); +} + +// static +Packet Graph::GetPacketFromHandle(int64_t packet_handle) { + internal::PacketWithContext* packet_with_context = + reinterpret_cast(packet_handle); + return packet_with_context->packet(); +} + +// static +Graph* Graph::GetContextFromHandle(int64_t packet_handle) { + internal::PacketWithContext* packet_with_context = + reinterpret_cast(packet_handle); + return packet_with_context->GetContext(); +} + +// static +bool Graph::RemovePacket(int64_t packet_handle) { + internal::PacketWithContext* packet_with_context = + reinterpret_cast(packet_handle); + Graph* context = packet_with_context->GetContext(); + absl::MutexLock lock(&(context->all_packets_mutex_)); + return context->all_packets_.erase(packet_with_context) != 0; +} + +void Graph::EnsureMinimumExecutorStackSizeForJava() {} + +::mediapipe::Status Graph::AddCallbackHandler(std::string output_stream_name, + jobject java_callback) { + if (!graph_config()) { + return ::mediapipe::InternalError("Graph is not loaded!"); + } + std::unique_ptr handler( + new internal::CallbackHandler(this, java_callback)); + std::string side_packet_name; + tool::AddCallbackCalculator(output_stream_name, graph_config(), + &side_packet_name, + /* use_std_function = */ true); + EnsureMinimumExecutorStackSizeForJava(); + side_packets_callbacks_.emplace( + side_packet_name, MakePacket>( + handler->CreateCallback())); + callback_handlers_.emplace_back(std::move(handler)); + return ::mediapipe::OkStatus(); +} + +::mediapipe::Status Graph::AddCallbackWithHeaderHandler( + std::string output_stream_name, jobject java_callback) { + if (!graph_config()) { + return ::mediapipe::InternalError("Graph is not loaded!"); + } + std::unique_ptr handler( + new internal::CallbackHandler(this, java_callback)); + std::string side_packet_name; + 
tool::AddCallbackWithHeaderCalculator(output_stream_name, output_stream_name, + graph_config(), &side_packet_name, + /* use_std_function = */ true); + EnsureMinimumExecutorStackSizeForJava(); + side_packets_callbacks_.emplace( + side_packet_name, + MakePacket>( + handler->CreateCallbackWithHeader())); + callback_handlers_.emplace_back(std::move(handler)); + return ::mediapipe::OkStatus(); +} + +int64_t Graph::AddSurfaceOutput(const std::string& output_stream_name) { + if (!graph_config()) { + LOG(ERROR) << "Graph is not loaded!"; + return 0; + } + +#ifdef MEDIAPIPE_DISABLE_GPU + LOG(FATAL) << "GPU support has been disabled in this build!"; +#else + CalculatorGraphConfig::Node* sink_node = graph_config()->add_node(); + sink_node->set_name(::mediapipe::tool::GetUnusedNodeName( + *graph_config(), absl::StrCat("egl_surface_sink_", output_stream_name))); + sink_node->set_calculator("GlSurfaceSinkCalculator"); + sink_node->add_input_stream(output_stream_name); + sink_node->add_input_side_packet( + absl::StrCat(kGpuSharedTagName, ":", kGpuSharedSidePacketName)); + + const std::string input_side_packet_name = + ::mediapipe::tool::GetUnusedSidePacketName( + *graph_config(), absl::StrCat(output_stream_name, "_surface")); + sink_node->add_input_side_packet( + absl::StrCat("SURFACE:", input_side_packet_name)); + + auto it_inserted = output_surface_side_packets_.emplace( + input_side_packet_name, + AdoptAsUniquePtr(new mediapipe::EglSurfaceHolder())); + + return WrapPacketIntoContext(it_inserted.first->second); +#endif // defined(MEDIAPIPE_DISABLE_GPU) +} + +::mediapipe::Status Graph::LoadBinaryGraph(std::string path_to_graph) { + std::string graph_config_string; + ::mediapipe::Status status = + mediapipe::file::GetContents(path_to_graph, &graph_config_string); + if (!status.ok()) { + return status; + } + return LoadBinaryGraph(graph_config_string.c_str(), + graph_config_string.length()); +} + +::mediapipe::Status Graph::LoadBinaryGraph(const char* data, int size) { + 
CalculatorGraphConfig graph_config; + if (!graph_config.ParseFromArray(data, size)) { + return ::mediapipe::InvalidArgumentError("Failed to parse the graph"); + } + graph_configs_.push_back(graph_config); + return ::mediapipe::OkStatus(); +} + +::mediapipe::Status Graph::LoadBinaryGraphTemplate(const char* data, int size) { + CalculatorGraphTemplate graph_template; + if (!graph_template.ParseFromArray(data, size)) { + return ::mediapipe::InvalidArgumentError("Failed to parse the graph"); + } + graph_templates_.push_back(graph_template); + return ::mediapipe::OkStatus(); +} + +::mediapipe::Status Graph::SetGraphType(std::string graph_type) { + graph_type_ = graph_type; + return ::mediapipe::OkStatus(); +} + +::mediapipe::Status Graph::SetGraphOptions(const char* data, int size) { + if (!graph_options_.ParseFromArray(data, size)) { + return ::mediapipe::InvalidArgumentError("Failed to parse the graph"); + } + return ::mediapipe::OkStatus(); +} + +CalculatorGraphConfig Graph::GetCalculatorGraphConfig() { + CalculatorGraph temp_graph; + ::mediapipe::Status status = InitializeGraph(&temp_graph); + if (!status.ok()) { + LOG(ERROR) << "GetCalculatorGraphConfig failed:\n" << status.message(); + } + return temp_graph.Config(); +} + +void Graph::CallbackToJava(JNIEnv* env, jobject java_callback_obj, + const Packet& packet) { + jclass callback_cls = env->GetObjectClass(java_callback_obj); + jmethodID processMethod = env->GetMethodID( + callback_cls, "process", + absl::StrFormat("(L%s;)V", std::string(Graph::kJavaPacketClassName)) + .c_str()); + + int64_t packet_handle = WrapPacketIntoContext(packet); + // Creates a Java Packet. + VLOG(2) << "Creating java packet preparing for callback to java."; + jobject java_packet = + CreateJavaPacket(env, global_java_packet_cls_, packet_handle); + VLOG(2) << "Calling java callback."; + env->CallVoidMethod(java_callback_obj, processMethod, java_packet); + // release the packet after callback. 
+ RemovePacket(packet_handle); + env->DeleteLocalRef(callback_cls); + env->DeleteLocalRef(java_packet); + VLOG(2) << "Returned from java callback."; +} + +void Graph::CallbackToJava(JNIEnv* env, jobject java_callback_obj, + const Packet& packet, const Packet& header_packet) { + jclass callback_cls = env->GetObjectClass(java_callback_obj); + jmethodID processMethod = env->GetMethodID( + callback_cls, "process", + absl::StrFormat("(L%s;L%s;)V", std::string(Graph::kJavaPacketClassName), + std::string(Graph::kJavaPacketClassName)) + .c_str()); + + int64_t packet_handle = WrapPacketIntoContext(packet); + int64_t header_packet_handle = WrapPacketIntoContext(header_packet); + // Creates a Java Packet. + jobject java_packet = + CreateJavaPacket(env, global_java_packet_cls_, packet_handle); + jobject java_header_packet = + CreateJavaPacket(env, global_java_packet_cls_, header_packet_handle); + env->CallVoidMethod(java_callback_obj, processMethod, java_packet, + java_header_packet); + // release the packet after callback. + RemovePacket(packet_handle); + RemovePacket(header_packet_handle); + env->DeleteLocalRef(callback_cls); + env->DeleteLocalRef(java_packet); + env->DeleteLocalRef(java_header_packet); +} + +void Graph::SetPacketJavaClass(JNIEnv* env) { + if (global_java_packet_cls_ == nullptr) { + jclass packet_cls = + env->FindClass(mediapipe::android::Graph::kJavaPacketClassName); + global_java_packet_cls_ = + reinterpret_cast(env->NewGlobalRef(packet_cls)); + } +} + +::mediapipe::Status Graph::RunGraphUntilClose(JNIEnv* env) { + // Get a global reference to the packet class, so it can be used in other + // native thread for call back. + SetPacketJavaClass(env); + // Running as a synchronized mode, the same Java thread is available through + // out the run. 
+ CalculatorGraph calculator_graph; + ::mediapipe::Status status = InitializeGraph(&calculator_graph); + if (!status.ok()) { + LOG(ERROR) << status.message(); + running_graph_.reset(nullptr); + return status; + } + // TODO: gpu & services set up! + status = calculator_graph.Run(CreateCombinedSidePackets()); + LOG(INFO) << "Graph run finished."; + + return status; +} + +::mediapipe::Status Graph::StartRunningGraph(JNIEnv* env) { + if (running_graph_) { + return ::mediapipe::InternalError("Graph is already running."); + } + // Get a global reference to the packet class, so it can be used in other + // native thread for call back. + SetPacketJavaClass(env); + // Running as a synchronized mode, the same Java thread is available + // throughout the run. + running_graph_.reset(new CalculatorGraph()); + // Set the mode for adding packets to graph input streams. + running_graph_->SetGraphInputStreamAddMode(graph_input_stream_add_mode_); + if (VLOG_IS_ON(2)) { + LOG(INFO) << "input packet streams:"; + for (auto& name : graph_config()->input_stream()) { + LOG(INFO) << name; + } + } + ::mediapipe::Status status; +#ifndef MEDIAPIPE_DISABLE_GPU + status = running_graph_->SetGpuResources(gpu_resources_); + if (!status.ok()) { + LOG(ERROR) << status.message(); + running_graph_.reset(nullptr); + return status; + } +#endif // !defined(MEDIAPIPE_DISABLE_GPU) + + for (const auto& service_packet : service_packets_) { + status = running_graph_->SetServicePacket(*service_packet.first, + service_packet.second); + if (!status.ok()) { + LOG(ERROR) << status.message(); + running_graph_.reset(nullptr); + return status; + } + } + + status = InitializeGraph(running_graph_.get()); + if (!status.ok()) { + LOG(ERROR) << status.message(); + running_graph_.reset(nullptr); + return status; + } + LOG(INFO) << "Start running the graph, waiting for inputs."; + status = + running_graph_->StartRun(CreateCombinedSidePackets(), stream_headers_); + if (!status.ok()) { + LOG(ERROR) << status; + 
running_graph_.reset(nullptr); + return status; + } + return mediapipe::OkStatus(); +} + +::mediapipe::Status Graph::SetTimestampAndMovePacketToInputStream( + const std::string& stream_name, int64_t packet_handle, int64_t timestamp) { + internal::PacketWithContext* packet_with_context = + reinterpret_cast(packet_handle); + Packet& packet = packet_with_context->packet(); + + // Set the timestamp of the packet in-place by calling the rvalue-reference + // version of At here. + packet = std::move(packet).At(Timestamp(timestamp)); + + // Then std::move it into the input stream. + return AddPacketToInputStream(stream_name, std::move(packet)); +} + +::mediapipe::Status Graph::AddPacketToInputStream( + const std::string& stream_name, const Packet& packet) { + if (!running_graph_) { + return ::mediapipe::FailedPreconditionError("Graph must be running."); + } + + return running_graph_->AddPacketToInputStream(stream_name, packet); +} + +::mediapipe::Status Graph::AddPacketToInputStream( + const std::string& stream_name, Packet&& packet) { + if (!running_graph_) { + return ::mediapipe::FailedPreconditionError("Graph must be running."); + } + + return running_graph_->AddPacketToInputStream(stream_name, std::move(packet)); +} + +::mediapipe::Status Graph::CloseInputStream(std::string stream_name) { + if (!running_graph_) { + return ::mediapipe::FailedPreconditionError("Graph must be running."); + } + LOG(INFO) << "Close input stream: " << stream_name; + return running_graph_->CloseInputStream(stream_name); +} + +::mediapipe::Status Graph::CloseAllInputStreams() { + LOG(INFO) << "Close all input streams."; + if (!running_graph_) { + return ::mediapipe::FailedPreconditionError("Graph must be running."); + } + return running_graph_->CloseAllInputStreams(); +} + +::mediapipe::Status Graph::CloseAllPacketSources() { + LOG(INFO) << "Close all input streams."; + if (!running_graph_) { + return ::mediapipe::FailedPreconditionError("Graph must be running."); + } + return 
running_graph_->CloseAllPacketSources(); +} + +::mediapipe::Status Graph::WaitUntilDone(JNIEnv* env) { + if (!running_graph_) { + return ::mediapipe::FailedPreconditionError("Graph must be running."); + } + ::mediapipe::Status status = running_graph_->WaitUntilDone(); + running_graph_.reset(nullptr); + return status; +} + +::mediapipe::Status Graph::WaitUntilIdle(JNIEnv* env) { + if (!running_graph_) { + return ::mediapipe::FailedPreconditionError("Graph must be running."); + } + return running_graph_->WaitUntilIdle(); +} + +void Graph::SetInputSidePacket(const std::string& stream_name, + const Packet& packet) { + side_packets_[stream_name] = packet; +} + +void Graph::SetStreamHeader(const std::string& stream_name, + const Packet& packet) { + stream_headers_[stream_name] = packet; + LOG(INFO) << stream_name << " stream header being set."; +} + +void Graph::SetGraphInputStreamAddMode( + CalculatorGraph::GraphInputStreamAddMode mode) { + graph_input_stream_add_mode_ = mode; +} + +mediapipe::GpuResources* Graph::GetGpuResources() const { + return gpu_resources_.get(); +} + +::mediapipe::Status Graph::SetParentGlContext(int64 java_gl_context) { + if (gpu_resources_) { + return ::mediapipe::AlreadyExistsError( + "trying to set the parent GL context, but the gpu shared " + "data has already been set up."); + } +#ifdef MEDIAPIPE_DISABLE_GPU + LOG(FATAL) << "GPU support has been disabled in this build!"; +#else + gpu_resources_ = mediapipe::GpuResources::Create( + reinterpret_cast(java_gl_context)) + .ValueOrDie(); +#endif // defined(MEDIAPIPE_DISABLE_GPU) + return ::mediapipe::OkStatus(); +} + +void Graph::SetServicePacket(const GraphServiceBase& service, Packet packet) { + service_packets_[&service] = std::move(packet); +} + +void Graph::CancelGraph() { + if (running_graph_) { + running_graph_->Cancel(); + } +} + +std::map Graph::CreateCombinedSidePackets() { + std::map combined_side_packets = side_packets_callbacks_; + combined_side_packets.insert(side_packets_.begin(), 
side_packets_.end()); + combined_side_packets.insert(output_surface_side_packets_.begin(), + output_surface_side_packets_.end()); + return combined_side_packets; +} + +ProfilingContext* Graph::GetProfilingContext() { + if (running_graph_) { + return running_graph_->profiler(); + } + return nullptr; +} + +CalculatorGraphConfig* Graph::graph_config() { + // Return the last specified graph config with the required graph_type. + for (auto it = graph_configs_.rbegin(); it != graph_configs_.rend(); ++it) { + if (it->type() == graph_type()) { + return &*it; + } + } + for (auto it = graph_templates_.rbegin(); it != graph_templates_.rend(); + ++it) { + if (it->mutable_config()->type() == graph_type()) { + return it->mutable_config(); + } + } + return nullptr; +} + +std::string Graph::graph_type() { + // If a graph-type is specified, that type is used. Otherwise the + // graph-type of the last specified graph config is used. + if (graph_type_ != "") { + return graph_type_; + } + if (!graph_configs_.empty()) { + return graph_configs_.back().type(); + } + if (!graph_templates_.empty()) { + return graph_templates_.back().config().type(); + } + return ""; +} + +::mediapipe::Status Graph::InitializeGraph(CalculatorGraph* graph) { + if (graph_configs_.size() == 1 && graph_templates_.empty()) { + return graph->Initialize(*graph_config()); + } else { + return graph->Initialize(graph_configs_, graph_templates_, {}, graph_type(), + &graph_options_); + } +} + +} // namespace android +} // namespace mediapipe diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.h new file mode 100644 index 000000000..c6f64b6fe --- /dev/null +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.h @@ -0,0 +1,247 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_H_ + +#include + +#include +#include +#include +#include +#include +#include + +#include "mediapipe/framework/calculator_framework.h" +#ifndef MEDIAPIPE_DISABLE_GPU +#include "mediapipe/gpu/gl_calculator_helper.h" +#endif // !defined(MEDIAPIPE_DISABLE_GPU) +#include "absl/synchronization/mutex.h" +#include "mediapipe/gpu/gpu_shared_data_internal.h" + +namespace mediapipe { +namespace android { + +namespace internal { +class CallbackHandler; +class PacketWithContext; +} // namespace internal + +// Graph is used to keep mediapipe related native objects into one place, +// so that we can clean up or query later. +class Graph { + public: + // The Packet java class name. + static constexpr char const* kJavaPacketClassName = + "com/google/mediapipe/framework/Packet"; + + Graph(); + Graph(const Graph&) = delete; + Graph& operator=(const Graph&) = delete; + ~Graph(); + + // Adds a callback for a given stream name. 
+ ::mediapipe::Status AddCallbackHandler(std::string output_stream_name, + jobject java_callback); + + // Adds a packet with header callback for a given stream name. + ::mediapipe::Status AddCallbackWithHeaderHandler( + std::string output_stream_name, jobject java_callback); + + // Loads a binary graph from a file. + ::mediapipe::Status LoadBinaryGraph(std::string path_to_graph); + // Loads a binary graph from a buffer. + ::mediapipe::Status LoadBinaryGraph(const char* data, int size); + // Loads a binary graph template from a buffer. + ::mediapipe::Status LoadBinaryGraphTemplate(const char* data, int size); + // Specifies the CalculatorGraphConfig::type of the top level graph. + ::mediapipe::Status SetGraphType(std::string graph_type); + // Specifies options such as template arguments for the graph. + ::mediapipe::Status SetGraphOptions(const char* data, int size); + + // Returns the expanded calculator graph config. + CalculatorGraphConfig GetCalculatorGraphConfig(); + + // Runs the graph until it closes. + // Mainly is used for writing tests. + ::mediapipe::Status RunGraphUntilClose(JNIEnv* env); + + // The following 4 functions are used to run the graph in + // step by step mode, the usual call sequence is like this: + // StartRunningGraph + // Loop: + // AddPacketToInputStream + // CloseInputStream + // WaitUtilDone + // TODO: We need to have a synchronized wait for each step, i.e., + // wait until nothing is running and nothing can be scheduled. + // + // Starts running the graph. + ::mediapipe::Status StartRunningGraph(JNIEnv* env); + // Closes one input stream. + ::mediapipe::Status CloseInputStream(std::string stream_name); + // Closes all the graph input streams. + ::mediapipe::Status CloseAllInputStreams(); + // Closes all the graph packet sources. + ::mediapipe::Status CloseAllPacketSources(); + // Waits util graph is done. + ::mediapipe::Status WaitUntilDone(JNIEnv* env); + // Waits util graph is idle. 
+ ::mediapipe::Status WaitUntilIdle(JNIEnv* env); + // Adds a packet to an input stream. + ::mediapipe::Status AddPacketToInputStream(const std::string& stream_name, + const Packet& packet); + // Moves a packet into an input stream. + ::mediapipe::Status AddPacketToInputStream(const std::string& stream_name, + Packet&& packet); + // Takes the MediaPipe Packet referenced by the handle, sets its timestamp, + // and then tries to move the Packet into the given input stream. + ::mediapipe::Status SetTimestampAndMovePacketToInputStream( + const std::string& stream_name, int64_t packet_handle, int64_t timestamp); + + // Sets the mode for adding packets to a graph input stream. + void SetGraphInputStreamAddMode( + CalculatorGraph::GraphInputStreamAddMode mode); + // Adds one input side packet. + void SetInputSidePacket(const std::string& stream_name, const Packet& packet); + + // Adds one stream header. + void SetStreamHeader(const std::string& stream_name, const Packet& packet); + + // Puts a mediapipe packet into the context for management. + // Returns the handle to the internal PacketWithContext object. + int64_t WrapPacketIntoContext(const Packet& packet); + + // Gets the shared mediapipe::GpuResources. Only valid once the graph is + // running. + mediapipe::GpuResources* GetGpuResources() const; + + // Adds a surface output for a given stream name. + // Multiple outputs can be attached to the same stream. + // Returns a native packet handle for the mediapipe::EglSurfaceHolder, or 0 in + // case of failure. + int64_t AddSurfaceOutput(const std::string& stream_name); + + // Sets a parent GL context to use for texture sharing. + ::mediapipe::Status SetParentGlContext(int64 java_gl_context); + + // Sets the object for a service. 
+ template + void SetServiceObject(const GraphService& service, + std::shared_ptr object) { + SetServicePacket(service, + MakePacket>(std::move(object))); + } + void SetServicePacket(const GraphServiceBase& service, Packet packet); + + // Cancels the currently running graph. + void CancelGraph(); + + // Returns false if not in the context. + static bool RemovePacket(int64_t packet_handle); + + // Returns the mediapipe Packet that is referenced by the handle. + static Packet GetPacketFromHandle(int64_t packet_handle); + + // Returns the Graph that is managing the packet. + static Graph* GetContextFromHandle(int64_t packet_handle); + + // Invokes a Java packet callback. + void CallbackToJava(JNIEnv* env, jobject java_callback_obj, + const Packet& packet); + + // Invokes a Java packet callback with header. + void CallbackToJava(JNIEnv* env, jobject java_callback_obj, + const Packet& packet, const Packet& header_packet); + + ProfilingContext* GetProfilingContext(); + + private: + // Increase the graph's default executor's worker thread stack size to run + // Java callbacks. Java's class loader may make deep recursive calls and + // result in a StackOverflowError. The non-portable ThreadPool class in + // thread/threadpool.h uses a default stack size of 64 KB, which is too + // small for Java's class loader. See bug 72414047. + void EnsureMinimumExecutorStackSizeForJava(); + void SetPacketJavaClass(JNIEnv* env); + std::map CreateCombinedSidePackets(); + // Returns the top-level CalculatorGraphConfig, or nullptr if the top-level + // CalculatorGraphConfig is not yet defined. + CalculatorGraphConfig* graph_config(); + // Returns the top-level CalculatorGraphConfig::type, or "" if the top-level + // CalculatorGraphConfig::type is not yet defined. + std::string graph_type(); + // Initializes CalculatorGraph |graph| using the loaded graph-configs. 
+ ::mediapipe::Status InitializeGraph(CalculatorGraph* graph); + + // CalculatorGraphConfigs for the calculator graph and subgraphs. + std::vector graph_configs_; + // CalculatorGraphTemplates for the calculator graph and subgraphs. + std::vector graph_templates_; + // Options such as template arguments for the top-level calculator graph. + Subgraph::SubgraphOptions graph_options_; + // The CalculatorGraphConfig::type of the top-level calculator graph. + std::string graph_type_ = ""; + + // Used by EnsureMinimumExecutorStackSizeForJava() to ensure that the + // default executor's stack size is increased only once. + bool executor_stack_size_increased_; + // Holds a global reference to a Packet class, so that this can be + // used from native attached thread. This is the suggested workaround for + // jni findclass issue. + jclass global_java_packet_cls_; + // All mediapipe Packet managed/referenced by the context. + // The map is used for the Java code to be able to look up the Packet + // based on the handler(pointer). + std::unordered_map> + all_packets_; + absl::Mutex all_packets_mutex_; + // All callback handlers managed by the context. + std::vector> callback_handlers_; + + // mediapipe::GpuResources used by the graph. + // Note: this class does not create a CalculatorGraph until StartRunningGraph + // is called, and we may have to create the mediapipe::GpuResources before + // that time, e.g. before a SurfaceOutput is associated with a Surface. + std::shared_ptr gpu_resources_; + + // Maps surface output names to the side packet used for the associated + // surface. + std::unordered_map output_surface_side_packets_; + + // Side packets used for callbacks. + std::map side_packets_callbacks_; + + // Side packets set using SetInputSidePacket. + std::map side_packets_; + + // Service packets held here before the graph's creation. + std::map service_packets_; + + // All headers that required by the graph input streams. 
+ // Note: header has to be set for the calculators that require it during + // Open(). + std::map stream_headers_; + + std::unique_ptr running_graph_; + CalculatorGraph::GraphInputStreamAddMode graph_input_stream_add_mode_ = + CalculatorGraph::GraphInputStreamAddMode::WAIT_TILL_NOT_FULL; +}; + +} // namespace android +} // namespace mediapipe + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_gl_sync_token.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_gl_sync_token.cc new file mode 100644 index 000000000..4aa859071 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_gl_sync_token.cc @@ -0,0 +1,40 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_gl_sync_token.h" + +#include + +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/gpu/gl_context.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" + +JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeWaitOnCpu)( + JNIEnv* env, jclass cls, jlong syncToken) { + mediapipe::GlSyncToken& token = + *reinterpret_cast(syncToken); + token->Wait(); +} + +JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeWaitOnGpu)( + JNIEnv* env, jclass cls, jlong syncToken) { + mediapipe::GlSyncToken& token = + *reinterpret_cast(syncToken); + token->WaitOnGpu(); +} + +JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeRelease)( + JNIEnv* env, jclass cls, jlong syncToken) { + delete reinterpret_cast(syncToken); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_gl_sync_token.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_gl_sync_token.h new file mode 100644 index 000000000..a7650866e --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_gl_sync_token.h @@ -0,0 +1,42 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_GL_SYNC_TOKEN_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_GL_SYNC_TOKEN_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define GRAPH_GL_SYNC_TOKEN_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_GraphGlSyncToken_##METHOD_NAME + +JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeWaitOnCpu)(JNIEnv *, + jclass, + jlong); + +JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeWaitOnGpu)(JNIEnv *, + jclass, + jlong); + +JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeRelease)(JNIEnv *, + jclass, jlong); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_GL_SYNC_TOKEN_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_jni.cc new file mode 100644 index 000000000..d968ff5d0 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_jni.cc @@ -0,0 +1,379 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_jni.h" + +#include + +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/port/canonical_errors.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" + +using mediapipe::android::JStringToStdString; + +namespace { +mediapipe::Status AddSidePacketsIntoGraph( + mediapipe::android::Graph* mediapipe_graph, JNIEnv* env, + jobjectArray stream_names, jlongArray packets) { + jsize num_side_packets = env->GetArrayLength(stream_names); + if (num_side_packets != env->GetArrayLength(packets)) { + return mediapipe::InvalidArgumentError( + "Number of streams and packets doesn't match!"); + } + // Note, packets_array_ref is really a const jlong* but this clashes with the + // the expectation of ReleaseLongArrayElements below. 
+ jlong* packets_array_ref = env->GetLongArrayElements(packets, nullptr); + for (jsize i = 0; i < num_side_packets; ++i) { + jstring name = + reinterpret_cast(env->GetObjectArrayElement(stream_names, i)); + mediapipe_graph->SetInputSidePacket( + JStringToStdString(env, name), + mediapipe::android::Graph::GetPacketFromHandle(packets_array_ref[i])); + env->DeleteLocalRef(name); + } + env->ReleaseLongArrayElements(packets, packets_array_ref, JNI_ABORT); + return mediapipe::OkStatus(); +} + +mediapipe::Status AddStreamHeadersIntoGraph( + mediapipe::android::Graph* mediapipe_graph, JNIEnv* env, + jobjectArray stream_names, jlongArray packets) { + jsize num_headers = env->GetArrayLength(stream_names); + if (num_headers != env->GetArrayLength(packets)) { + return mediapipe::Status(::mediapipe::StatusCode::kFailedPrecondition, + "Number of streams and packets doesn't match!"); + } + jlong* packets_array_ref = env->GetLongArrayElements(packets, nullptr); + for (jsize i = 0; i < num_headers; ++i) { + jstring name = + reinterpret_cast(env->GetObjectArrayElement(stream_names, i)); + mediapipe_graph->SetStreamHeader( + JStringToStdString(env, name), + mediapipe::android::Graph::GetPacketFromHandle(packets_array_ref[i])); + env->DeleteLocalRef(name); + } + env->ReleaseLongArrayElements(packets, packets_array_ref, JNI_ABORT); + return mediapipe::OkStatus(); +} + +// Creates a java MediaPipeException object for a mediapipe::Status. 
+jthrowable CreateMediaPipeException(JNIEnv* env, mediapipe::Status status) { + jclass status_cls = + env->FindClass("com/google/mediapipe/framework/MediaPipeException"); + jmethodID status_ctr = env->GetMethodID(status_cls, "", "(I[B)V"); + int length = status.message().length(); + jbyteArray message_bytes = env->NewByteArray(length); + env->SetByteArrayRegion(message_bytes, 0, length, + reinterpret_cast(const_cast( + std::string(status.message()).c_str()))); + return reinterpret_cast( + env->NewObject(status_cls, status_ctr, status.code(), message_bytes)); +} + +// Throws a MediaPipeException for any non-ok mediapipe::Status. +// Note that the exception is thrown after execution returns to Java. +bool ThrowIfError(JNIEnv* env, mediapipe::Status status) { + if (!status.ok()) { + env->Throw(CreateMediaPipeException(env, status)); + return true; + } + return false; +} +} // namespace + +JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeCreateGraph)(JNIEnv* env, + jobject thiz) { + if (!mediapipe::java::SetJavaVM(env)) { + return 0; + } + return reinterpret_cast(new mediapipe::android::Graph()); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeReleaseGraph)(JNIEnv* env, + jobject thiz, + jlong context) { + delete reinterpret_cast(context); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraph)(JNIEnv* env, + jobject thiz, + jlong context, + jstring path) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + const char* path_ref = env->GetStringUTFChars(path, nullptr); + // Make a copy of the std::string and release the jni reference. 
+ std::string path_to_graph(path_ref); + env->ReleaseStringUTFChars(path, path_ref); + ThrowIfError(env, mediapipe_graph->LoadBinaryGraph(path_to_graph)); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraphBytes)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + jbyte* data_ptr = env->GetByteArrayElements(data, nullptr); + int size = env->GetArrayLength(data); + mediapipe::Status status = + mediapipe_graph->LoadBinaryGraph(reinterpret_cast(data_ptr), size); + env->ReleaseByteArrayElements(data, data_ptr, JNI_ABORT); + ThrowIfError(env, status); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraphTemplate)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + jbyte* data_ptr = env->GetByteArrayElements(data, nullptr); + int size = env->GetArrayLength(data); + mediapipe::Status status = mediapipe_graph->LoadBinaryGraphTemplate( + reinterpret_cast(data_ptr), size); + env->ReleaseByteArrayElements(data, data_ptr, JNI_ABORT); + ThrowIfError(env, status); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphType)(JNIEnv* env, + jobject thiz, + jlong context, + jstring graph_type) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + const char* graph_type_ref = env->GetStringUTFChars(graph_type, nullptr); + // Make a copy of the std::string and release the jni reference. 
+ std::string graph_type_string(graph_type_ref); + env->ReleaseStringUTFChars(graph_type, graph_type_ref); + ThrowIfError(env, mediapipe_graph->SetGraphType(graph_type_string)); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphOptions)(JNIEnv* env, + jobject thiz, + jlong context, + jbyteArray data) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + jbyte* data_ptr = env->GetByteArrayElements(data, nullptr); + int size = env->GetArrayLength(data); + mediapipe::Status status = + mediapipe_graph->SetGraphOptions(reinterpret_cast(data_ptr), size); + env->ReleaseByteArrayElements(data, data_ptr, JNI_ABORT); + ThrowIfError(env, status); +} + +JNIEXPORT jbyteArray JNICALL GRAPH_METHOD(nativeGetCalculatorGraphConfig)( + JNIEnv* env, jobject thiz, jlong context) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + auto graph = mediapipe_graph->GetCalculatorGraphConfig(); + if (graph.IsInitialized()) { + int size = graph.ByteSize(); + char* buffer = new char[size]; + graph.SerializeToArray(buffer, size); + jbyteArray byteArray = env->NewByteArray(size); + env->SetByteArrayRegion(byteArray, 0, size, + reinterpret_cast(buffer)); + return byteArray; + } + return nullptr; +} + +JNIEXPORT void JNICALL +GRAPH_METHOD(nativeAddPacketCallback)(JNIEnv* env, jobject thiz, jlong context, + jstring stream_name, jobject callback) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + std::string output_stream_name = JStringToStdString(env, stream_name); + + // Create a global reference to the callback object, so that it can + // be accessed later. 
+ jobject global_callback_ref = env->NewGlobalRef(callback); + if (!global_callback_ref) { + ThrowIfError( + env, ::mediapipe::InternalError("Failed to allocate packet callback")); + return; + } + ThrowIfError(env, mediapipe_graph->AddCallbackHandler(output_stream_name, + global_callback_ref)); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketWithHeaderCallback)( + JNIEnv* env, jobject thiz, jlong context, jstring stream_name, + jobject callback) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + std::string output_stream_name = JStringToStdString(env, stream_name); + + // Create a global reference to the callback object, so that it can + // be accessed later. + jobject global_callback_ref = env->NewGlobalRef(callback); + if (!global_callback_ref) { + ThrowIfError( + env, ::mediapipe::InternalError("Failed to allocate packet callback")); + return; + } + ThrowIfError(env, mediapipe_graph->AddCallbackWithHeaderHandler( + output_stream_name, global_callback_ref)); +} + +JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeAddSurfaceOutput)( + JNIEnv* env, jobject thiz, jlong context, jstring stream_name) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + std::string output_stream_name = JStringToStdString(env, stream_name); + + return mediapipe_graph->AddSurfaceOutput(output_stream_name); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeRunGraphUntilClose)( + JNIEnv* env, jobject thiz, jlong context, jobjectArray stream_names, + jlongArray packets) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + if (ThrowIfError(env, AddSidePacketsIntoGraph(mediapipe_graph, env, + stream_names, packets))) { + return; + } + ThrowIfError(env, mediapipe_graph->RunGraphUntilClose(env)); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeStartRunningGraph)( + JNIEnv* env, jobject thiz, jlong context, jobjectArray side_packet_names, + jlongArray side_packet_handles, jobjectArray stream_names_with_header, + 
jlongArray header_handles) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + if (ThrowIfError( + env, AddSidePacketsIntoGraph(mediapipe_graph, env, side_packet_names, + side_packet_handles))) { + return; + } + if (ThrowIfError(env, AddStreamHeadersIntoGraph(mediapipe_graph, env, + stream_names_with_header, + header_handles))) { + return; + } + ThrowIfError(env, mediapipe_graph->StartRunningGraph(env)); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketToInputStream)( + JNIEnv* env, jobject thiz, jlong context, jstring stream_name, jlong packet, + jlong timestamp) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + // We push in a copy of the current packet at the given timestamp. + ThrowIfError(env, + mediapipe_graph->AddPacketToInputStream( + JStringToStdString(env, stream_name), + mediapipe::android::Graph::GetPacketFromHandle(packet).At( + mediapipe::Timestamp(timestamp)))); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeMovePacketToInputStream)( + JNIEnv* env, jobject thiz, jlong context, jstring stream_name, jlong packet, + jlong timestamp) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + + ThrowIfError( + env, mediapipe_graph->SetTimestampAndMovePacketToInputStream( + JStringToStdString(env, stream_name), + static_cast(packet), static_cast(timestamp))); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphInputStreamBlockingMode)( + JNIEnv* env, jobject thiz, jlong context, jboolean mode) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + if (mode) { + mediapipe_graph->SetGraphInputStreamAddMode( + mediapipe::CalculatorGraph::GraphInputStreamAddMode:: + WAIT_TILL_NOT_FULL); + } else { + mediapipe_graph->SetGraphInputStreamAddMode( + mediapipe::CalculatorGraph::GraphInputStreamAddMode::ADD_IF_NOT_FULL); + } +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseInputStream)( + JNIEnv* env, jobject thiz, jlong context, jstring 
stream_name) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + ThrowIfError(env, mediapipe_graph->CloseInputStream( + JStringToStdString(env, stream_name))); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseAllInputStreams)(JNIEnv* env, + jobject thiz, + jlong context) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + ThrowIfError(env, mediapipe_graph->CloseAllInputStreams()); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseAllPacketSources)( + JNIEnv* env, jobject thiz, jlong context) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + ThrowIfError(env, mediapipe_graph->CloseAllPacketSources()); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeWaitUntilGraphDone)(JNIEnv* env, + jobject thiz, + jlong context) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + ThrowIfError(env, mediapipe_graph->WaitUntilDone(env)); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeWaitUntilGraphIdle)(JNIEnv* env, + jobject thiz, + jlong context) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + ThrowIfError(env, mediapipe_graph->WaitUntilIdle(env)); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeUpdatePacketReference)( + JNIEnv* env, jobject thiz, jlong reference_packet, jlong new_packet) { + auto reference = + mediapipe::android::Graph::GetPacketFromHandle(reference_packet) + .Get>() + .get(); + auto new_value = mediapipe::android::Graph::GetPacketFromHandle(new_packet); + reference->UpdatePacket(new_value); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetParentGlContext)( + JNIEnv* env, jobject thiz, jlong context, jlong javaGlContext) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + ThrowIfError(env, mediapipe_graph->SetParentGlContext(javaGlContext)); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeCancelGraph)(JNIEnv* env, + jobject thiz, + jlong context) { + mediapipe::android::Graph* 
mediapipe_graph = + reinterpret_cast(context); + mediapipe_graph->CancelGraph(); +} + +JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeGetProfiler)(JNIEnv* env, + jobject thiz, + jlong context) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + return reinterpret_cast(mediapipe_graph->GetProfilingContext()); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_jni.h new file mode 100644 index 000000000..e08e36f4d --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_jni.h @@ -0,0 +1,129 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define GRAPH_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_Graph_##METHOD_NAME + +// Creates a native mediapipe context. 
+JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeCreateGraph)(JNIEnv* env, + jobject thiz); + +// Releases a native mediapipe context. +JNIEXPORT void JNICALL GRAPH_METHOD(nativeReleaseGraph)(JNIEnv* env, + jobject thiz, + jlong context); + +// Loads a binary mediapipe graph into the context. +JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraph)(JNIEnv* env, + jobject thiz, + jlong context, + jstring path); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraphBytes)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraphTemplate)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphType)(JNIEnv* env, + jobject thiz, + jlong context, + jstring graph_type); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphOptions)(JNIEnv* env, + jobject thiz, + jlong context, + jbyteArray data); + +JNIEXPORT jbyteArray JNICALL GRAPH_METHOD(nativeGetCalculatorGraphConfig)( + JNIEnv* env, jobject thiz, jlong context); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketCallback)( + JNIEnv* env, jobject thiz, jlong context, jstring stream_name, + jobject callback); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketWithHeaderCallback)( + JNIEnv* env, jobject thiz, jlong context, jstring stream_name, + jobject callback); + +JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeAddSurfaceOutput)( + JNIEnv* env, jobject thiz, jlong context, jstring stream_name); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeRunGraphUntilClose)( + JNIEnv* env, jobject thiz, jlong context, jobjectArray stream_names, + jlongArray packets); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeStartRunningGraph)( + JNIEnv* env, jobject thiz, jlong context, jobjectArray side_packet_names, + jlongArray side_packet_handles, jobjectArray stream_names_with_header, + jlongArray header_handles); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketToInputStream)( + JNIEnv* env, jobject thiz, 
jlong context, jstring stream_name, jlong packet, + jlong timestamp); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeMovePacketToInputStream)( + JNIEnv* env, jobject thiz, jlong context, jstring stream_name, jlong packet, + jlong timestamp); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphInputStreamBlockingMode)( + JNIEnv* env, jobject thiz, jlong context, jboolean mode); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseInputStream)( + JNIEnv* env, jobject thiz, jlong context, jstring stream_name); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseAllInputStreams)(JNIEnv* env, + jobject thiz, + jlong context); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseAllPacketSources)(JNIEnv* env, + jobject thiz, + jlong context); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeWaitUntilGraphDone)(JNIEnv* env, + jobject thiz, + jlong context); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeWaitUntilGraphIdle)(JNIEnv* env, + jobject thiz, + jlong context); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeUpdatePacketReference)( + JNIEnv* env, jobject thiz, jlong reference_packet, jlong new_packet); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetParentGlContext)( + JNIEnv* env, jobject thiz, jlong context, jlong javaGlContext); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeCancelGraph)(JNIEnv* env, + jobject thiz, + jlong context); + +JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeGetProfiler)(JNIEnv* env, + jobject thiz, + jlong context); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_profiler_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_profiler_jni.cc new file mode 100644 index 
000000000..c530ef062 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_profiler_jni.cc @@ -0,0 +1,72 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_profiler_jni.h" + +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/calculator_profile.pb.h" + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeReset)(JNIEnv* env, jobject thiz, + jlong handle) { + mediapipe::ProfilingContext* profiling_context = + reinterpret_cast(handle); + profiling_context->Reset(); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativePause)(JNIEnv* env, jobject thiz, + jlong handle) { + mediapipe::ProfilingContext* profiling_context = + reinterpret_cast(handle); + profiling_context->Pause(); +} + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeResume)(JNIEnv* env, jobject thiz, + jlong handle) { + mediapipe::ProfilingContext* profiling_context = + reinterpret_cast(handle); + profiling_context->Resume(); +} + +JNIEXPORT jobjectArray JNICALL GRAPH_METHOD(nativeGetCalculatorProfiles)( + JNIEnv* env, jobject thiz, jlong handle) { + mediapipe::ProfilingContext* profiling_context = + reinterpret_cast(handle); + + std::vector profiles_vec; + if (profiling_context->GetCalculatorProfiles(&profiles_vec) != + 
::mediapipe::OkStatus()) { + return nullptr; + } + int num_profiles = profiles_vec.size(); + if (num_profiles == 0) { + return nullptr; + } + + jobjectArray profiles = + env->NewObjectArray(num_profiles, env->FindClass("[B"), nullptr); + for (int i = 0; i < num_profiles; i++) { + const auto& profile = profiles_vec[i]; + int size = profile.ByteSize(); + + jbyteArray byteArray = env->NewByteArray(size); + jbyte* byteArrayBuffer = env->GetByteArrayElements(byteArray, nullptr); + profile.SerializeToArray(byteArrayBuffer, size); + env->ReleaseByteArrayElements(byteArray, byteArrayBuffer, 0); + + env->SetObjectArrayElement(profiles, i, byteArray); + env->DeleteLocalRef(byteArray); + } + + return profiles; +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_profiler_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_profiler_jni.h new file mode 100644 index 000000000..720814e31 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_profiler_jni.h @@ -0,0 +1,43 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_PROFILER_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_PROFILER_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define GRAPH_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_GraphProfiler_##METHOD_NAME + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeReset)(JNIEnv* env, jobject thiz, + jlong profiling_context); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativeResume)(JNIEnv* env, jobject thiz, + jlong profiling_context); + +JNIEXPORT void JNICALL GRAPH_METHOD(nativePause)(JNIEnv* env, jobject thiz, + jlong profiling_context); + +JNIEXPORT jobjectArray JNICALL GRAPH_METHOD(nativeGetCalculatorProfiles)( + JNIEnv* env, jobject thiz, jlong profiling_context); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_PROFILER_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_service_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_service_jni.cc new file mode 100644 index 000000000..6e87fa309 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_service_jni.cc @@ -0,0 +1,31 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_service_jni.h" + +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h" + +namespace mediapipe { +namespace android { + +void GraphServiceHelper::SetServicePacket(jlong context_handle, + const GraphServiceBase& service, + Packet packet) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context_handle); + mediapipe_graph->SetServicePacket(service, packet); +} + +} // namespace android +} // namespace mediapipe diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_service_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_service_jni.h new file mode 100644 index 000000000..0bd99b018 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_service_jni.h @@ -0,0 +1,51 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_SERVICE_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_SERVICE_JNI_H_ + +#include + +#include "mediapipe/framework/graph_service.h" +#include "mediapipe/framework/packet.h" + +namespace mediapipe { +namespace android { + +// Support class for handling graph services in JNI. +// It keeps the context argument opaque and avoids exposing the entire +// Graph to service JNI implementations. +class GraphServiceHelper { + public: + // Call this static method to provide a native service object in response to + // a call to GraphService#installServiceObject in Java. + // The context_handle parameter should be the same as passed to + // installServiceObject. 
+ template + static void SetServiceObject(jlong context_handle, + const GraphService& service, + std::shared_ptr object) { + SetServicePacket(context_handle, service, + MakePacket>(std::move(object))); + } + + private: + static void SetServicePacket(jlong context_handle, + const GraphServiceBase& service, Packet packet); +}; + +} // namespace android +} // namespace mediapipe + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_SERVICE_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc new file mode 100644 index 000000000..5f41d9487 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc @@ -0,0 +1,49 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h" + +#include "mediapipe/gpu/gl_calculator_helper.h" +#include "mediapipe/gpu/gl_texture_buffer.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" + +using mediapipe::GlTextureBufferSharedPtr; + +JNIEXPORT void JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeReleaseBuffer)( + JNIEnv* env, jobject thiz, jlong nativeHandle) { + GlTextureBufferSharedPtr* buffer = + reinterpret_cast(nativeHandle); + delete buffer; +} + +JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetTextureName)( + JNIEnv* env, jobject thiz, jlong nativeHandle) { + GlTextureBufferSharedPtr* buffer = + reinterpret_cast(nativeHandle); + return (*buffer)->name(); +} + +JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetWidth)( + JNIEnv* env, jobject thiz, jlong nativeHandle) { + GlTextureBufferSharedPtr* buffer = + reinterpret_cast(nativeHandle); + return (*buffer)->width(); +} + +JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetHeight)( + JNIEnv* env, jobject thiz, jlong nativeHandle) { + GlTextureBufferSharedPtr* buffer = + reinterpret_cast(nativeHandle); + return (*buffer)->height(); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h new file mode 100644 index 000000000..ce6bbcbc7 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h @@ -0,0 +1,44 @@ +// Copyright 2019 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_TEXTURE_FRAME_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_TEXTURE_FRAME_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define GRAPH_TEXTURE_FRAME_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_GraphTextureFrame_##METHOD_NAME + +// Releases a native mediapipe::GpuBuffer. +JNIEXPORT void JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeReleaseBuffer)( + JNIEnv* env, jobject thiz, jlong nativeHandle); + +JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetTextureName)( + JNIEnv* env, jobject thiz, jlong nativeHandle); + +JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetWidth)( + JNIEnv* env, jobject thiz, jlong nativeHandle); + +JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetHeight)( + JNIEnv* env, jobject thiz, jlong nativeHandle); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_TEXTURE_FRAME_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/jni_util.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/jni_util.cc new file mode 100644 index 
000000000..d383a7b0b --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/jni_util.cc @@ -0,0 +1,147 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" + +#include + +#include "absl/synchronization/mutex.h" +#include "mediapipe/framework/port/logging.h" + +namespace { + +ABSL_CONST_INIT absl::Mutex g_jvm_mutex(absl::kConstInit); +JavaVM* g_jvm GUARDED_BY(g_jvm_mutex); + +class JvmThread { + public: + explicit JvmThread(JavaVM* jvm) { + jvm_ = jvm; + attached_ = false; + jni_env_ = nullptr; + int get_env_stat = + jvm_->GetEnv(reinterpret_cast(&jni_env_), JNI_VERSION_1_6); + // TODO: report the error back to Java layer. 
+ switch (get_env_stat) { + case JNI_OK: + break; + case JNI_EDETACHED: + LOG(INFO) << "GetEnv: not attached"; + if (jvm_->AttachCurrentThread( +#ifdef __ANDROID__ + &jni_env_, +#else + reinterpret_cast(&jni_env_), +#endif // __ANDROID__ + nullptr) != 0) { + LOG(ERROR) << "Failed to attach to java thread."; + break; + } + attached_ = true; + break; + case JNI_EVERSION: + LOG(ERROR) << "GetEnv: jni version not supported."; + break; + default: + LOG(ERROR) << "GetEnv: unknown status."; + break; + } + } + + ~JvmThread() { + if (attached_) { + jvm_->DetachCurrentThread(); + } + } + + JNIEnv* GetEnv() const { return jni_env_; } + + private: + bool attached_; + JavaVM* jvm_; + JNIEnv* jni_env_; +}; + +// Since current android abi doesn't have pthread_local, we have to rely on +// pthread functions to achieve the detachment of java thread when native thread +// exits (see: http://developer.android.com/training/articles/perf-jni.html). +static pthread_key_t jvm_thread_key; +static pthread_once_t key_once = PTHREAD_ONCE_INIT; + +static void ThreadExitCallback(void* key_value) { + JvmThread* jvm_thread = reinterpret_cast(key_value); + // Detach the thread when thread exits. + LOG(INFO) << "Exiting thread. Detach thread."; + delete jvm_thread; +} + +void MakeKey() { pthread_key_create(&jvm_thread_key, ThreadExitCallback); } + +// Returns the global Java VM instance. 
+JavaVM* GetJavaVM() { + absl::MutexLock lock(&g_jvm_mutex); + return g_jvm; +} + +} // namespace + +namespace mediapipe { + +namespace android { + +std::string JStringToStdString(JNIEnv* env, jstring jstr) { + const char* s = env->GetStringUTFChars(jstr, 0); + if (!s) { + return std::string(); + } + std::string str(s); + env->ReleaseStringUTFChars(jstr, s); + return str; +} + +} // namespace android + +namespace java { + +bool HasJavaVM() { + absl::MutexLock lock(&g_jvm_mutex); + return g_jvm != nullptr; +} + +bool SetJavaVM(JNIEnv* env) { + absl::MutexLock lock(&g_jvm_mutex); + if (!g_jvm) { + if (env->GetJavaVM(&g_jvm) != JNI_OK) { + LOG(ERROR) << "Can not get the Java VM instance!"; + g_jvm = nullptr; + return false; + } + } + return true; +} + +JNIEnv* GetJNIEnv() { + pthread_once(&key_once, MakeKey); + JvmThread* jvm_thread = + reinterpret_cast(pthread_getspecific(jvm_thread_key)); + if (jvm_thread == nullptr) { + jvm_thread = new JvmThread(GetJavaVM()); + pthread_setspecific(jvm_thread_key, jvm_thread); + } + return jvm_thread->GetEnv(); +} + +} // namespace java + +} // namespace mediapipe diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/jni_util.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/jni_util.h new file mode 100644 index 000000000..81a44919d --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/jni_util.h @@ -0,0 +1,46 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_JNI_UTIL_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_JNI_UTIL_H_ + +#include + +#include + +namespace mediapipe { + +namespace android { + +std::string JStringToStdString(JNIEnv* env, jstring jstr); + +} // namespace android + +namespace java { + +// Sets the global Java VM instance, if it is not set yet. +// Returns true on success. +bool SetJavaVM(JNIEnv* env); + +// Determines if the global Java VM instance is available. +bool HasJavaVM(); + +// Returns the current JNI environment. +JNIEnv* GetJNIEnv(); + +} // namespace java + +} // namespace mediapipe + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_JNI_UTIL_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_context_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_context_jni.cc new file mode 100644 index 000000000..60ca862d2 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_context_jni.cc @@ -0,0 +1,54 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/java/com/google/mediapipe/framework/jni/packet_context_jni.h" + +#include "absl/strings/str_format.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h" + +// Releases a native mediapipe packet. +JNIEXPORT void JNICALL PACKET_METHOD(nativeReleasePacket)(JNIEnv* env, + jobject thiz, + jlong packet) { + // Removes the packet from the mediapipe context. + mediapipe::android::Graph::RemovePacket(packet); +} + +JNIEXPORT jlong JNICALL PACKET_METHOD(nativeGetTimestamp)(JNIEnv* env, + jobject thiz, + jlong packet) { + return mediapipe::android::Graph::GetPacketFromHandle(packet) + .Timestamp() + .Value(); +} + +JNIEXPORT jlong JNICALL PACKET_METHOD(nativeCopyPacket)(JNIEnv* env, + jobject thiz, + jlong packet) { + auto mediapipe_graph = + mediapipe::android::Graph::GetContextFromHandle(packet); + mediapipe::Packet mediapipe_packet = + mediapipe::android::Graph::GetPacketFromHandle(packet); + return mediapipe_graph->WrapPacketIntoContext(mediapipe_packet); +} + +jobject CreateJavaPacket(JNIEnv* env, jclass packet_cls, jlong packet) { + jmethodID createMethod = env->GetStaticMethodID( + packet_cls, "create", + absl::StrFormat( + "(J)L%s;", + std::string(mediapipe::android::Graph::kJavaPacketClassName)) + .c_str()); + return env->CallStaticObjectMethod(packet_cls, createMethod, packet); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_context_jni.h 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_context_jni.h new file mode 100644 index 000000000..44d8bb137 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_context_jni.h @@ -0,0 +1,49 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CONTEXT_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CONTEXT_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define PACKET_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_Packet_##METHOD_NAME + +// Releases a native mediapipe packet. +JNIEXPORT void JNICALL PACKET_METHOD(nativeReleasePacket)(JNIEnv* env, + jobject thiz, + jlong packet); + +// Returns the timestamp of the packet. +JNIEXPORT jlong JNICALL PACKET_METHOD(nativeGetTimestamp)(JNIEnv* env, + jobject thiz, + jlong packet); + +// Make a copy of a mediapipe packet, basically increase the reference count. +JNIEXPORT jlong JNICALL PACKET_METHOD(nativeCopyPacket)(JNIEnv* env, + jobject thiz, + jlong packet); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +// Calls the java method to create an instance of java Packet. 
+jobject CreateJavaPacket(JNIEnv* env, jclass packet_cls, jlong packet); + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CONTEXT_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc new file mode 100644 index 000000000..149d46eb1 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc @@ -0,0 +1,389 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.h" + +#include +#include + +#include "mediapipe/framework/camera_intrinsics.h" +#include "mediapipe/framework/formats/image_format.pb.h" +#include "mediapipe/framework/formats/image_frame.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/framework/formats/time_series_header.pb.h" +#include "mediapipe/framework/formats/video_stream_header.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" +#ifndef MEDIAPIPE_DISABLE_GPU +#include "mediapipe/gpu/gl_calculator_helper.h" +#endif // !defined(MEDIAPIPE_DISABLE_GPU) + +namespace { + +template +int64_t CreatePacketScalar(jlong context, const T& value) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + mediapipe::Packet packet = mediapipe::Adopt(new T(value)); + return mediapipe_graph->WrapPacketIntoContext(packet); +} + +// Creates a new internal::PacketWithContext object, and returns the native +// handle. +int64_t CreatePacketWithContext(jlong context, + const mediapipe::Packet& packet) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + return mediapipe_graph->WrapPacketIntoContext(packet); +} + +} // namespace + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateReferencePacket)( + JNIEnv* env, jobject thiz, jlong context, jlong packet) { + auto mediapipe_graph = reinterpret_cast(context); + mediapipe::Packet mediapipe_packet = + mediapipe::android::Graph::GetPacketFromHandle(packet); + auto reference_packet = mediapipe::AdoptAsUniquePtr( + new mediapipe::SyncedPacket(mediapipe_packet)); + // assigned the initial value of the packet reference. 
+ return mediapipe_graph->WrapPacketIntoContext(reference_packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbImage)( + JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width, + jint height) { + const void* data = env->GetDirectBufferAddress(byte_buffer); + auto image_frame = absl::make_unique<::mediapipe::ImageFrame>( + mediapipe::ImageFormat::SRGB, width, height, + ::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary); + int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer); + if (buffer_size != image_frame->PixelDataSize()) { + LOG(ERROR) << "The input image buffer should have 4 bytes alignment."; + LOG(ERROR) << "Buffer size: " << buffer_size + << ", Buffer size needed: " << image_frame->PixelDataSize() + << ", Image width: " << width; + return 0L; + } + std::memcpy(image_frame->MutablePixelData(), data, + image_frame->PixelDataSize()); + mediapipe::Packet packet = mediapipe::Adopt(image_frame.release()); + return CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbImageFromRgba)( + JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width, + jint height) { + const uint8_t* rgba_data = + static_cast(env->GetDirectBufferAddress(byte_buffer)); + auto image_frame = absl::make_unique<::mediapipe::ImageFrame>( + mediapipe::ImageFormat::SRGB, width, height, + ::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary); + int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer); + if (buffer_size != width * height * 4) { + LOG(ERROR) << "Please check the input buffer size."; + LOG(ERROR) << "Buffer size: " << buffer_size + << ", Buffer size needed: " << width * height * 4 + << ", Image width: " << width; + return 0L; + } + mediapipe::android::RgbaToRgb(rgba_data, width * 4, width, height, + image_frame->MutablePixelData(), + image_frame->WidthStep()); + mediapipe::Packet packet = mediapipe::Adopt(image_frame.release()); + return 
CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateGrayscaleImage)( + JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width, + jint height) { + auto image_frame = absl::make_unique<::mediapipe::ImageFrame>( + mediapipe::ImageFormat::GRAY8, width, height, + ::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary); + int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer); + if (buffer_size != width * height) { + LOG(ERROR) << "Please check the input buffer size."; + LOG(ERROR) << "Buffer size: " << buffer_size + << ", Buffer size needed: " << width * height + << ", Image height: " << height; + return 0L; + } + + int width_step = image_frame->WidthStep(); + // Copy buffer data to image frame's pixel_data_. + const char* src_row = + reinterpret_cast(env->GetDirectBufferAddress(byte_buffer)); + char* dst_row = reinterpret_cast(image_frame->MutablePixelData()); + for (int i = height; i > 0; --i) { + std::memcpy(dst_row, src_row, width); + src_row += width; + dst_row += width_step; + } + mediapipe::Packet packet = mediapipe::Adopt(image_frame.release()); + return CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbaImageFrame)( + JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width, + jint height) { + const void* rgba_data = env->GetDirectBufferAddress(byte_buffer); + auto image_frame = absl::make_unique<::mediapipe::ImageFrame>( + mediapipe::ImageFormat::SRGBA, width, height, + ::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary); + int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer); + if (buffer_size != image_frame->PixelDataSize()) { + LOG(ERROR) << "Please check the input buffer size."; + LOG(ERROR) << "Buffer size: " << buffer_size + << ", Buffer size needed: " << image_frame->PixelDataSize() + << ", Image width: " << width; + return 0L; + } + std::memcpy(image_frame->MutablePixelData(), 
rgba_data, + image_frame->PixelDataSize()); + mediapipe::Packet packet = mediapipe::Adopt(image_frame.release()); + return CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateAudioPacket)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data, + jint num_channels, jint num_samples) { + if (env->GetArrayLength(data) != num_channels * num_samples * 2) { + LOG(ERROR) << "Please check the audio data size, " + "has to be num_channels * num_samples * 2 = " + << num_channels * num_samples * 2; + return 0L; + } + std::unique_ptr<::mediapipe::Matrix> matrix( + new ::mediapipe::Matrix(num_channels, num_samples)); + // Note, audio_data_ref is really a const jbyte* but this clashes with the + // the expectation of ReleaseByteArrayElements below. + jbyte* audio_data_ref = env->GetByteArrayElements(data, nullptr); + // Preparing and normalize the audio data. + // kMultiplier is same as what used in av_sync_media_decoder.cc. + static const float kMultiplier = 1.f / (1 << 15); + // We try to not assume the Endian order of the data. 
+ const uint8_t* audio_sample = reinterpret_cast(audio_data_ref); + for (int sample = 0; sample < num_samples; ++sample) { + for (int channel = 0; channel < num_channels; ++channel) { + int16_t value = (audio_sample[1] & 0xff) << 8 | audio_sample[0]; + (*matrix)(channel, sample) = kMultiplier * value; + audio_sample += 2; + } + } + env->ReleaseByteArrayElements(data, audio_data_ref, JNI_ABORT); + mediapipe::Packet packet = mediapipe::Adopt(matrix.release()); + return CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt16)(JNIEnv* env, + jobject thiz, + jlong context, + jshort value) { + return CreatePacketScalar(context, value); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32)(JNIEnv* env, + jobject thiz, + jlong context, + jint value) { + return CreatePacketScalar(context, value); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt64)(JNIEnv* env, + jobject thiz, + jlong context, + jlong value) { + return CreatePacketScalar(context, value); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat32)( + JNIEnv* env, jobject thiz, jlong context, jfloat value) { + return CreatePacketScalar(context, value); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat64)( + JNIEnv* env, jobject thiz, jlong context, jdouble value) { + return CreatePacketScalar(context, value); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateBool)( + JNIEnv* env, jobject thiz, jlong context, jboolean value) { + return CreatePacketScalar(context, value); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateString)( + JNIEnv* env, jobject thiz, jlong context, jstring value) { + return CreatePacketScalar( + context, mediapipe::android::JStringToStdString(env, value)); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateVideoHeader)( + JNIEnv* env, jobject thiz, jlong context, jint width, jint height) { + mediapipe::VideoHeader header; + 
header.format = mediapipe::ImageFormat::SRGB; + header.width = width; + header.height = height; + return CreatePacketScalar(context, header); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateTimeSeriesHeader)( + JNIEnv* env, jobject thiz, jlong context, jint num_channels, + jdouble sample_rate) { + mediapipe::TimeSeriesHeader header; + header.set_num_channels(num_channels); + header.set_sample_rate(sample_rate); + return CreatePacketScalar(context, header); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateMatrix)( + JNIEnv* env, jobject thiz, jlong context, jint rows, jint cols, + jfloatArray data) { + if (env->GetArrayLength(data) != rows * cols) { + LOG(ERROR) << "Please check the matrix data size, " + "has to be rows * cols = " + << rows * cols; + return 0L; + } + std::unique_ptr<::mediapipe::Matrix> matrix( + new ::mediapipe::Matrix(rows, cols)); + // The java and native has the same byte order, by default is little Endian, + // we can safely copy data directly, we have tests to cover this. + env->GetFloatArrayRegion(data, 0, rows * cols, matrix->data()); + mediapipe::Packet packet = mediapipe::Adopt(matrix.release()); + return CreatePacketWithContext(context, packet); +} + +#ifndef MEDIAPIPE_DISABLE_GPU + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateGpuBuffer)( + JNIEnv* env, jobject thiz, jlong context, jint name, jint width, + jint height, jobject texture_release_callback) { + mediapipe::android::Graph* mediapipe_graph = + reinterpret_cast(context); + auto* gpu_resources = mediapipe_graph->GetGpuResources(); + CHECK(gpu_resources) << "Cannot create a mediapipe::GpuBuffer packet on a " + "graph without GPU support"; + mediapipe::GlTextureBuffer::DeletionCallback cc_callback; + + if (texture_release_callback) { + // TODO: see if this can be cached. + // Note: we don't get this from the object because people may pass a + // subclass of PacketCreator, and the method is private. 
+ jclass my_class = + env->FindClass("com/google/mediapipe/framework/PacketCreator"); + jmethodID release_method = + env->GetMethodID(my_class, "releaseWithSyncToken", + "(JL" + "com/google/mediapipe/framework/TextureReleaseCallback" + ";)V"); + CHECK(release_method); + env->DeleteLocalRef(my_class); + + jobject java_callback = env->NewGlobalRef(texture_release_callback); + jobject packet_creator = env->NewGlobalRef(thiz); + cc_callback = [mediapipe_graph, packet_creator, release_method, + java_callback](mediapipe::GlSyncToken release_token) { + JNIEnv* env = mediapipe::java::GetJNIEnv(); + + jlong raw_token = reinterpret_cast( + new mediapipe::GlSyncToken(std::move(release_token))); + env->CallVoidMethod(packet_creator, release_method, raw_token, + java_callback); + + // Note that this callback is called only once, and is not saved + // anywhere else, so we can and should delete it here. + env->DeleteGlobalRef(java_callback); + env->DeleteGlobalRef(packet_creator); + }; + } + mediapipe::Packet packet = mediapipe::MakePacket( + mediapipe::GlTextureBuffer::Wrap(GL_TEXTURE_2D, name, width, height, + mediapipe::GpuBufferFormat::kBGRA32, + cc_callback)); + return CreatePacketWithContext(context, packet); +} + +#endif // !defined(MEDIAPIPE_DISABLE_GPU) + +// TODO: Add vector creators. + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat32Array)( + JNIEnv* env, jobject thiz, jlong context, jfloatArray data) { + jsize count = env->GetArrayLength(data); + jfloat* data_ref = env->GetFloatArrayElements(data, nullptr); + float* floats = new float[count]; + // jfloat is a "machine-dependent native type" which represents a 32-bit + // float. C++ makes no guarantees about the size of floating point types, and + // some exotic architectures don't even have 32-bit floats (or even binary + // floats), but on all architectures we care about this is a float. 
+ static_assert(std::is_same::value, "jfloat must be float"); + std::memcpy(floats, data_ref, count * sizeof(float)); + env->ReleaseFloatArrayElements(data, data_ref, JNI_ABORT); + + // The reinterpret_cast is needed to make the Adopt template recognize + // that this is an array - this way Holder will call delete[]. + mediapipe::Packet packet = + mediapipe::Adopt(reinterpret_cast(floats)); + return CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32Array)( + JNIEnv* env, jobject thiz, jlong context, jintArray data) { + jsize count = env->GetArrayLength(data); + jint* data_ref = env->GetIntArrayElements(data, nullptr); + int32_t* ints = new int32_t[count]; + static_assert(std::is_same::value, "jint must be int32_t"); + std::memcpy(ints, data_ref, count * sizeof(int32_t)); + env->ReleaseIntArrayElements(data, data_ref, JNI_ABORT); + + // The reinterpret_cast is needed to make the Adopt template recognize + // that this is an array - this way Holder will call delete[]. 
+ mediapipe::Packet packet = + mediapipe::Adopt(reinterpret_cast(ints)); + return CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateStringFromByteArray)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data) { + jsize count = env->GetArrayLength(data); + jbyte* data_ref = env->GetByteArrayElements(data, nullptr); + mediapipe::Packet packet = mediapipe::Adopt( + new std::string(reinterpret_cast(data_ref), count)); + env->ReleaseByteArrayElements(data, data_ref, JNI_ABORT); + return CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateCalculatorOptions)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data) { + jsize count = env->GetArrayLength(data); + jbyte* data_ref = env->GetByteArrayElements(data, nullptr); + auto options = absl::make_unique(); + if (!options->ParseFromArray(data_ref, count)) { + LOG(ERROR) << "Parsing binary-encoded CalculatorOptions failed."; + return 0L; + } + mediapipe::Packet packet = mediapipe::Adopt(options.release()); + env->ReleaseByteArrayElements(data, data_ref, JNI_ABORT); + return CreatePacketWithContext(context, packet); +} + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateCameraIntrinsics)( + JNIEnv* env, jobject thiz, jlong context, jfloat fx, jfloat fy, jfloat cx, + jfloat cy, jfloat width, jfloat height) { + mediapipe::Packet packet = + mediapipe::MakePacket(fx, fy, cx, cy, width, height); + return CreatePacketWithContext(context, packet); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_creator_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_creator_jni.h new file mode 100644 index 000000000..a453d1d06 --- /dev/null +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_creator_jni.h @@ -0,0 +1,116 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CREATOR_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CREATOR_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define PACKET_CREATOR_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_PacketCreator_##METHOD_NAME + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateReferencePacket)( + JNIEnv* env, jobject thiz, jlong context, jlong packet); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbImage)( + JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width, + jint height); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbaImageFrame)( + JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width, + jint height); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbImageFromRgba)( + JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width, + jint height); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateGrayscaleImage)( + JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width, + jint height); + +JNIEXPORT jlong JNICALL 
PACKET_CREATOR_METHOD(nativeCreateAudioPacket)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data, + jint num_channels, jint num_samples); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt16)(JNIEnv* env, + jobject thiz, + jlong context, + jshort value); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32)(JNIEnv* env, + jobject thiz, + jlong context, + jint value); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt64)(JNIEnv* env, + jobject thiz, + jlong context, + jlong value); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat32)( + JNIEnv* env, jobject thiz, jlong context, jfloat value); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat64)( + JNIEnv* env, jobject thiz, jlong context, jdouble value); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateBool)(JNIEnv* env, + jobject thiz, + jlong context, + jboolean value); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateString)( + JNIEnv* env, jobject thiz, jlong context, jstring value); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateVideoHeader)( + JNIEnv* env, jobject thiz, jlong context, jint width, jint height); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateTimeSeriesHeader)( + JNIEnv* env, jobject thiz, jlong context, jint num_channels, + jdouble sample_rate); + +// Creates a MediaPipe::Matrix packet using the float array data. +// The data must in column major order. 
+JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateMatrix)( + JNIEnv* env, jobject thiz, jlong context, jint rows, jint cols, + jfloatArray data); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateGpuBuffer)( + JNIEnv* env, jobject thiz, jlong context, jint name, jint width, + jint height, jobject texture_release_callback); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat32Array)( + JNIEnv* env, jobject thiz, jlong context, jfloatArray data); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32Array)( + JNIEnv* env, jobject thiz, jlong context, jintArray data); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateStringFromByteArray)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateCalculatorOptions)( + JNIEnv* env, jobject thiz, jlong context, jbyteArray data); + +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateCameraIntrinsics)( + JNIEnv* env, jobject thiz, jlong context, jfloat fx, jfloat fy, jfloat cx, + jfloat cy, jfloat width, jfloat height); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CREATOR_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_getter_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_getter_jni.cc new file mode 100644 index 000000000..9940d186e --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_getter_jni.cc @@ -0,0 +1,347 @@ +// Copyright 2019 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/java/com/google/mediapipe/framework/jni/packet_getter_jni.h" + +#include "mediapipe/framework/formats/image_frame.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/framework/formats/time_series_header.pb.h" +#include "mediapipe/framework/formats/video_stream_header.h" +#include "mediapipe/framework/port/core_proto_inc.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h" +#ifndef MEDIAPIPE_DISABLE_GPU +#include "mediapipe/gpu/gl_calculator_helper.h" +#endif // !defined(MEDIAPIPE_DISABLE_GPU) + +namespace { + +template +const T& GetFromNativeHandle(int64_t packet_handle) { + return mediapipe::android::Graph::GetPacketFromHandle(packet_handle).Get(); +} +} // namespace + +JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetPacketFromReference)( + JNIEnv* env, jobject thiz, jlong packet) { + mediapipe::Packet mediapipe_packet = + mediapipe::android::Graph::GetPacketFromHandle(packet) + .Get>() + ->Get(); + auto mediapipe_graph = + mediapipe::android::Graph::GetContextFromHandle(packet); + return mediapipe_graph->WrapPacketIntoContext(mediapipe_packet); +} + +JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetPairPackets)( + JNIEnv* env, jobject thiz, jlong packet) { + jlongArray return_handles = env->NewLongArray(2); + auto pair_packets = + GetFromNativeHandle>( + 
packet); + auto mediapipe_graph = + mediapipe::android::Graph::GetContextFromHandle(packet); + int64_t handles[2]; + handles[0] = mediapipe_graph->WrapPacketIntoContext(pair_packets.first); + handles[1] = mediapipe_graph->WrapPacketIntoContext(pair_packets.second); + env->SetLongArrayRegion(return_handles, 0, 2, + reinterpret_cast(handles)); + return return_handles; +} + +JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetVectorPackets)( + JNIEnv* env, jobject thiz, jlong packet) { + auto vector_packets = + GetFromNativeHandle>(packet); + auto mediapipe_graph = + mediapipe::android::Graph::GetContextFromHandle(packet); + jlongArray return_handles = env->NewLongArray(vector_packets.size()); + std::vector handles(vector_packets.size()); + for (int i = 0; i < vector_packets.size(); ++i) { + handles[i] = mediapipe_graph->WrapPacketIntoContext(vector_packets[i]); + } + env->SetLongArrayRegion(return_handles, 0, handles.size(), + reinterpret_cast(&(handles[0]))); + return return_handles; +} + +JNIEXPORT jshort JNICALL PACKET_GETTER_METHOD(nativeGetInt16)(JNIEnv* env, + jobject thiz, + jlong packet) { + return GetFromNativeHandle(packet); +} + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetInt32)(JNIEnv* env, + jobject thiz, + jlong packet) { + return GetFromNativeHandle(packet); +} + +JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetInt64)(JNIEnv* env, + jobject thiz, + jlong packet) { + return GetFromNativeHandle(packet); +} + +JNIEXPORT jfloat JNICALL PACKET_GETTER_METHOD(nativeGetFloat32)(JNIEnv* env, + jobject thiz, + jlong packet) { + return GetFromNativeHandle(packet); +} + +JNIEXPORT jdouble JNICALL PACKET_GETTER_METHOD(nativeGetFloat64)(JNIEnv* env, + jobject thiz, + jlong packet) { + return GetFromNativeHandle(packet); +} + +JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetBool)(JNIEnv* env, + jobject thiz, + jlong packet) { + return GetFromNativeHandle(packet); +} + +JNIEXPORT jstring JNICALL 
PACKET_GETTER_METHOD(nativeGetString)(JNIEnv* env, + jobject thiz, + jlong packet) { + const std::string& value = GetFromNativeHandle(packet); + return env->NewStringUTF(value.c_str()); +} + +JNIEXPORT jbyteArray JNICALL +PACKET_GETTER_METHOD(nativeGetBytes)(JNIEnv* env, jobject thiz, jlong packet) { + const std::string& value = GetFromNativeHandle(packet); + jbyteArray data = env->NewByteArray(value.length()); + env->SetByteArrayRegion(data, 0, value.length(), + reinterpret_cast(value.c_str())); + return data; +} + +JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetProtoBytes)( + JNIEnv* env, jobject thiz, jlong packet) { + mediapipe::Packet mediapipe_packet = + mediapipe::android::Graph::GetPacketFromHandle(packet); + const auto& proto_message = mediapipe_packet.GetProtoMessageLite(); + std::string serialized; + proto_message.SerializeToString(&serialized); + jbyteArray data = env->NewByteArray(serialized.size()); + env->SetByteArrayRegion(data, 0, serialized.size(), + reinterpret_cast(serialized.c_str())); + return data; +} + +JNIEXPORT jshortArray JNICALL PACKET_GETTER_METHOD(nativeGetInt16Vector)( + JNIEnv* env, jobject thiz, jlong packet) { + const std::vector& values = + GetFromNativeHandle>(packet); + jshortArray result = env->NewShortArray(values.size()); + env->SetShortArrayRegion(result, 0, values.size(), &(values[0])); + return result; +} + +JNIEXPORT jintArray JNICALL PACKET_GETTER_METHOD(nativeGetInt32Vector)( + JNIEnv* env, jobject thiz, jlong packet) { + const std::vector& values = + GetFromNativeHandle>(packet); + jintArray result = env->NewIntArray(values.size()); + env->SetIntArrayRegion(result, 0, values.size(), &(values[0])); + return result; +} + +JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetInt64Vector)( + JNIEnv* env, jobject thiz, jlong packet) { + const std::vector& values = + GetFromNativeHandle>(packet); + jlongArray result = env->NewLongArray(values.size()); + // 64 bit builds treat jlong as long long, and int64_t 
as long int, although + // both are 64 bits, but still need to use the reinterpret_cast to avoid the + // compiling error. + env->SetLongArrayRegion(result, 0, values.size(), + reinterpret_cast(&(values[0]))); + return result; +} + +JNIEXPORT jfloatArray JNICALL PACKET_GETTER_METHOD(nativeGetFloat32Vector)( + JNIEnv* env, jobject thiz, jlong packet) { + const std::vector& values = + GetFromNativeHandle>(packet); + jfloatArray result = env->NewFloatArray(values.size()); + env->SetFloatArrayRegion(result, 0, values.size(), &(values[0])); + return result; +} + +JNIEXPORT jdoubleArray JNICALL PACKET_GETTER_METHOD(nativeGetFloat64Vector)( + JNIEnv* env, jobject thiz, jlong packet) { + const std::vector& values = + GetFromNativeHandle>(packet); + jdoubleArray result = env->NewDoubleArray(values.size()); + env->SetDoubleArrayRegion(result, 0, values.size(), &(values[0])); + return result; +} + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetImageWidth)(JNIEnv* env, + jobject thiz, + jlong packet) { + const ::mediapipe::ImageFrame& image = + GetFromNativeHandle<::mediapipe::ImageFrame>(packet); + return image.Width(); +} + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetImageHeight)( + JNIEnv* env, jobject thiz, jlong packet) { + const ::mediapipe::ImageFrame& image = + GetFromNativeHandle<::mediapipe::ImageFrame>(packet); + return image.Height(); +} + +JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetImageData)( + JNIEnv* env, jobject thiz, jlong packet, jobject byte_buffer) { + const ::mediapipe::ImageFrame& image = + GetFromNativeHandle<::mediapipe::ImageFrame>(packet); + uint8* data = static_cast(env->GetDirectBufferAddress(byte_buffer)); + + int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer); + + // Assume byte buffer stores pixel data contiguously. 
+ const int expected_buffer_size = image.Width() * image.Height() * + image.ByteDepth() * image.NumberOfChannels(); + if (buffer_size != expected_buffer_size) { + LOG(ERROR) << "Expected buffer size " << expected_buffer_size + << " got: " << buffer_size << ", width " << image.Width() + << ", height " << image.Height() << ", channels " + << image.NumberOfChannels(); + return false; + } + + image.CopyToBuffer(data, expected_buffer_size); + return true; +} + +JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetRgbaFromRgb)( + JNIEnv* env, jobject thiz, jlong packet, jobject byte_buffer) { + const ::mediapipe::ImageFrame& image = + GetFromNativeHandle<::mediapipe::ImageFrame>(packet); + uint8_t* rgba_data = + static_cast(env->GetDirectBufferAddress(byte_buffer)); + int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer); + if (buffer_size != image.Width() * image.Height() * 4) { + LOG(ERROR) << "Buffer size has to be width*height*4\n" + << "Image width: " << image.Width() + << ", Image height: " << image.Height() + << ", Buffer size: " << buffer_size << ", Buffer size needed: " + << image.Width() * image.Height() * 4; + return false; + } + mediapipe::android::RgbToRgba(image.PixelData(), image.WidthStep(), + image.Width(), image.Height(), rgba_data, + image.Width() * 4, 255); + return true; +} + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetVideoHeaderWidth)( + JNIEnv* env, jobject thiz, jlong packet) { + return GetFromNativeHandle(packet).width; +} + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetVideoHeaderHeight)( + JNIEnv* env, jobject thiz, jlong packet) { + return GetFromNativeHandle(packet).height; +} + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD( + nativeGetTimeSeriesHeaderNumChannels)(JNIEnv* env, jobject thiz, + jlong packet) { + return GetFromNativeHandle(packet) + .num_channels(); +} + +JNIEXPORT jdouble JNICALL PACKET_GETTER_METHOD( + nativeGetTimeSeriesHeaderSampleRate)(JNIEnv* env, jobject thiz, + jlong packet) { + return 
GetFromNativeHandle(packet).sample_rate(); +} + +JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetAudioData)( + JNIEnv* env, jobject thiz, jlong packet) { + const ::mediapipe::Matrix& audio_mat = + GetFromNativeHandle<::mediapipe::Matrix>(packet); + int num_channels = audio_mat.rows(); + int num_samples = audio_mat.cols(); + int data_size = num_channels * num_samples * 2; + const int kMultiplier = 1 << 15; + jbyteArray byte_data = env->NewByteArray(data_size); + int offset = 0; + for (int sample = 0; sample < num_samples; ++sample) { + for (int channel = 0; channel < num_channels; ++channel) { + int16 value = + static_cast(audio_mat(channel, sample) * kMultiplier); + // The java and native has the same byte order, by default is little + // Endian, we can safely copy data directly, we have tests to cover this. + env->SetByteArrayRegion(byte_data, offset, 2, + reinterpret_cast(&value)); + offset += 2; + } + } + return byte_data; +} + +JNIEXPORT jfloatArray JNICALL PACKET_GETTER_METHOD(nativeGetMatrixData)( + JNIEnv* env, jobject thiz, jlong packet) { + const ::mediapipe::Matrix& audio_mat = + GetFromNativeHandle<::mediapipe::Matrix>(packet); + int rows = audio_mat.rows(); + int cols = audio_mat.cols(); + jfloatArray float_data = env->NewFloatArray(rows * cols); + env->SetFloatArrayRegion(float_data, 0, rows * cols, + reinterpret_cast(audio_mat.data())); + return float_data; +} + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetMatrixRows)(JNIEnv* env, + jobject thiz, + jlong packet) { + return GetFromNativeHandle<::mediapipe::Matrix>(packet).rows(); +} + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetMatrixCols)(JNIEnv* env, + jobject thiz, + jlong packet) { + return GetFromNativeHandle<::mediapipe::Matrix>(packet).cols(); +} + +#ifndef MEDIAPIPE_DISABLE_GPU + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetGpuBufferName)( + JNIEnv* env, jobject thiz, jlong packet) { + const mediapipe::GpuBuffer& gpu_buffer = + GetFromNativeHandle(packet); 
+ // gpu_buffer.name() returns a GLuint. Make sure the cast to jint is safe. + static_assert(sizeof(GLuint) <= sizeof(jint), + "The cast to jint may truncate GLuint"); + return static_cast(gpu_buffer.GetGlTextureBufferSharedPtr()->name()); +} + +JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetGpuBuffer)(JNIEnv* env, + jobject thiz, + jlong packet) { + const mediapipe::GpuBuffer& gpu_buffer = + GetFromNativeHandle(packet); + const mediapipe::GlTextureBufferSharedPtr& ptr = + gpu_buffer.GetGlTextureBufferSharedPtr(); + ptr->WaitUntilComplete(); + return reinterpret_cast( + new mediapipe::GlTextureBufferSharedPtr(ptr)); +} + +#endif // !defined(MEDIAPIPE_DISABLE_GPU) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_getter_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_getter_jni.h new file mode 100644 index 000000000..cb35bac66 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/packet_getter_jni.h @@ -0,0 +1,157 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_GETTER_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_GETTER_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define PACKET_GETTER_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_PacketGetter_##METHOD_NAME + +// Get a native mediapipe packet. +JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetPacketFromReference)( + JNIEnv* env, jobject thiz, jlong packet); + +JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetPairPackets)( + JNIEnv* env, jobject thiz, jlong packet); + +JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetVectorPackets)( + JNIEnv* env, jobject thiz, jlong packet); + +JNIEXPORT jshort JNICALL PACKET_GETTER_METHOD(nativeGetInt16)(JNIEnv* env, + jobject thiz, + jlong packet); + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetInt32)(JNIEnv* env, + jobject thiz, + jlong packet); + +JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetInt64)(JNIEnv* env, + jobject thiz, + jlong packet); + +JNIEXPORT jfloat JNICALL PACKET_GETTER_METHOD(nativeGetFloat32)(JNIEnv* env, + jobject thiz, + jlong packet); + +JNIEXPORT jdouble JNICALL PACKET_GETTER_METHOD(nativeGetFloat64)(JNIEnv* env, + jobject thiz, + jlong packet); + +JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetBool)(JNIEnv* env, + jobject thiz, + jlong packet); + +JNIEXPORT jstring JNICALL PACKET_GETTER_METHOD(nativeGetString)(JNIEnv* env, + jobject thiz, + jlong packet); + +JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetBytes)(JNIEnv* env, + jobject thiz, + jlong packet); + +JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetProtoBytes)( + JNIEnv* env, jobject thiz, jlong packet); + +JNIEXPORT jshortArray JNICALL PACKET_GETTER_METHOD(nativeGetInt16Vector)( + JNIEnv* env, jobject thiz, jlong packet); + +JNIEXPORT jintArray JNICALL PACKET_GETTER_METHOD(nativeGetInt32Vector)( + JNIEnv* env, jobject thiz, jlong packet); + +JNIEXPORT 
jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetInt64Vector)( + JNIEnv* env, jobject thiz, jlong packet); + +JNIEXPORT jfloatArray JNICALL PACKET_GETTER_METHOD(nativeGetFloat32Vector)( + JNIEnv* env, jobject thiz, jlong packet); + +JNIEXPORT jdoubleArray JNICALL PACKET_GETTER_METHOD(nativeGetFloat64Vector)( + JNIEnv* env, jobject thiz, jlong packet); + +// ImageFrame jni functions. +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetImageWidth)(JNIEnv* env, + jobject thiz, + jlong packet); + +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetImageHeight)(JNIEnv* env, + jobject thiz, + jlong packet); + +// Before calling this, the byte_buffer needs to have the correct allocated +// size. +JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetImageData)( + JNIEnv* env, jobject thiz, jlong packet, jobject byte_buffer); + +// Before calling this, the byte_buffer needs to have the correct allocated +// size. +JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetRgbaFromRgb)( + JNIEnv* env, jobject thiz, jlong packet, jobject byte_buffer); + +// Returns the width in VideoHeader packet. +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetVideoHeaderWidth)( + JNIEnv* env, jobject thiz, jlong packet); + +// Returns the height in VideoHeader packet. +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetVideoHeaderHeight)( + JNIEnv* env, jobject thiz, jlong packet); + +// Gets the byte array from the MediaPipe audio data.. +JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetAudioData)( + JNIEnv* env, jobject thiz, jlong packet); + +// Gets number of channels in time series header packet. +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD( + nativeGetTimeSeriesHeaderNumChannels)(JNIEnv* env, jobject thiz, + jlong packet); + +// Gets sampling rate in time series header packet. 
+JNIEXPORT jdouble JNICALL PACKET_GETTER_METHOD( + nativeGetTimeSeriesHeaderSampleRate)(JNIEnv* env, jobject thiz, + jlong packet); + +// Returns the raw float array data for the MediaPipe Matrix. +// Note: MediaPipe::Matrix is column major matrix. +JNIEXPORT jfloatArray JNICALL PACKET_GETTER_METHOD(nativeGetMatrixData)( + JNIEnv* env, jobject thiz, jlong packet); + +// Returns the number of rows of the matrix. +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetMatrixRows)(JNIEnv* env, + jobject thiz, + jlong packet); + +// Returns the number of cols of the matrix. +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetMatrixCols)(JNIEnv* env, + jobject thiz, + jlong packet); + +// Returns the GL texture name of the mediapipe::GpuBuffer. +JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetGpuBufferName)( + JNIEnv* env, jobject thiz, jlong packet); + +// Returns a mediapipe::GlTextureBufferSharedPtr*. +// This will survive independently of the packet. +JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetGpuBuffer)(JNIEnv* env, + jobject thiz, + jlong packet); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_GETTER_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/surface_output_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/surface_output_jni.cc new file mode 100644 index 000000000..29646a474 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/surface_output_jni.cc @@ -0,0 +1,131 @@ +// Copyright 2019 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "absl/synchronization/mutex.h" +#ifdef __ANDROID__ +#include +#endif // __ANDROID__ + +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status.h" +#include "mediapipe/gpu/egl_surface_holder.h" +#include "mediapipe/gpu/gpu_shared_data_internal.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" +#include "mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.h" + +// TODO: CHECK in JNI does not work. Raise exception instead. + +namespace { +mediapipe::EglSurfaceHolder* GetSurfaceHolder(jlong packet) { + return mediapipe::android::Graph::GetPacketFromHandle(packet) + .Get>() + .get(); +} + +mediapipe::GlContext* GetGlContext(jlong context) { + auto mediapipe_graph = reinterpret_cast(context); + mediapipe::GpuResources* gpu_resources = mediapipe_graph->GetGpuResources(); + return gpu_resources ? 
gpu_resources->gl_context().get() : nullptr; +} +} // namespace + +JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetFlipY)( + JNIEnv* env, jobject thiz, jlong packet, jboolean flip) { + mediapipe::EglSurfaceHolder* surface_holder = GetSurfaceHolder(packet); + surface_holder->flip_y = flip; +} + +JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetSurface)( + JNIEnv* env, jobject thiz, jlong context, jlong packet, jobject surface) { +#ifdef __ANDROID__ + mediapipe::GlContext* gl_context = GetGlContext(context); + CHECK(gl_context) << "GPU shared data not created"; + mediapipe::EglSurfaceHolder* surface_holder = GetSurfaceHolder(packet); + + // ANativeWindow_fromSurface must not be called on the GL thread, it is a + // JNI call. + ANativeWindow* window = nullptr; + if (surface) { + window = ANativeWindow_fromSurface(env, surface); + } + + auto status = gl_context->Run( + [gl_context, surface_holder, surface, window]() -> ::mediapipe::Status { + absl::MutexLock lock(&surface_holder->mutex); + // Must destroy old surface first in case we are assigning the same + // surface. + // TODO: keep a ref to Java object and short-circuit if same? + if (surface_holder->owned) { + // NOTE: according to the eglDestroySurface documentation, the surface + // is destroyed immediately "if it is not current on any thread". This + // surface is only made current by the SurfaceSinkCalculator while it + // holds the surface_holder->mutex, so at this point we know it is not + // current on any thread, and we can rely on it being destroyed + // immediately. 
+ RET_CHECK(eglDestroySurface(gl_context->egl_display(), + surface_holder->surface)) + << "eglDestroySurface failed:" << eglGetError(); + } + EGLSurface egl_surface = EGL_NO_SURFACE; + if (surface) { + EGLint surface_attr[] = {EGL_NONE}; + + egl_surface = eglCreateWindowSurface(gl_context->egl_display(), + gl_context->egl_config(), window, + surface_attr); + RET_CHECK(egl_surface != EGL_NO_SURFACE) + << "eglCreateWindowSurface() returned error:" << eglGetError(); + } + surface_holder->surface = egl_surface; + surface_holder->owned = egl_surface != EGL_NO_SURFACE; + return ::mediapipe::OkStatus(); + }); + MEDIAPIPE_CHECK_OK(status); + + if (window) { + VLOG(2) << "releasing window"; + ANativeWindow_release(window); + } +#else + LOG(FATAL) << "setSurface is only supported on Android"; +#endif // __ANDROID__ +} + +JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetEglSurface)( + JNIEnv* env, jobject thiz, jlong context, jlong packet, jlong surface) { + mediapipe::GlContext* gl_context = GetGlContext(context); + CHECK(gl_context) << "GPU shared data not created"; + auto egl_surface = reinterpret_cast(surface); + mediapipe::EglSurfaceHolder* surface_holder = GetSurfaceHolder(packet); + EGLSurface old_surface = EGL_NO_SURFACE; + + { + absl::MutexLock lock(&surface_holder->mutex); + if (surface_holder->owned) { + old_surface = surface_holder->surface; + } + surface_holder->surface = egl_surface; + surface_holder->owned = false; + } + + if (old_surface != EGL_NO_SURFACE) { + MEDIAPIPE_CHECK_OK( + gl_context->Run([gl_context, old_surface]() -> ::mediapipe::Status { + RET_CHECK(eglDestroySurface(gl_context->egl_display(), old_surface)) + << "eglDestroySurface failed:" << eglGetError(); + return ::mediapipe::OkStatus(); + })); + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/surface_output_jni.h 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/surface_output_jni.h new file mode 100644 index 000000000..d3c59f921 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/surface_output_jni.h @@ -0,0 +1,42 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_SURFACE_OUTPUT_JNI_H_ +#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_SURFACE_OUTPUT_JNI_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define MEDIAPIPE_SURFACE_OUTPUT_METHOD(METHOD_NAME) \ + Java_com_google_mediapipe_framework_SurfaceOutput_##METHOD_NAME + +JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetFlipY)( + JNIEnv* env, jobject thiz, jlong packet, jboolean flip); + +#ifdef __ANDROID__ +JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetSurface)( + JNIEnv* env, jobject thiz, jlong context, jlong packet, jobject surface); +#endif // __ANDROID__ + +JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetEglSurface)( + JNIEnv* env, jobject thiz, jlong context, jlong packet, jlong surface); + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_SURFACE_OUTPUT_JNI_H_ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/proguard.pgcfg b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/proguard.pgcfg new file mode 100644 index 000000000..699d36eee --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/proguard.pgcfg @@ -0,0 +1,26 @@ +# Additional flags to pass to Proguard when processing a binary that uses +# MediaPipe. + +# Keep public members of our public interfaces. This also prevents the +# obfuscation of the corresponding methods in classes implementing them, +# such as implementations of PacketCallback#process. 
+-keep public interface com.google.mediapipe.framework.* { + public *; +} + +# This method is invoked by native code. +-keep public class com.google.mediapipe.framework.Packet { + public static *** create(***); + public long getNativeHandle(); + public void release(); +} + +# This method is invoked by native code. +-keep public class com.google.mediapipe.framework.PacketCreator { + *** releaseWithSyncToken(...); +} + +# This method is invoked by native code. +-keep public class com.google.mediapipe.framework.MediaPipeException { + (int, byte[]); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/BUILD new file mode 100644 index 000000000..fc378b4eb --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/BUILD @@ -0,0 +1,32 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +licenses(["notice"]) # Apache 2.0 + +# OpenGL utilities. 
+ +# TODO: move Compat.java in here, remove dep +# TODO: add tests +android_library( + name = "glutil", + srcs = glob(["**/*.java"]), + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "@com_google_code_findbugs//jar", + "@com_google_common_flogger//jar", + "@com_google_common_flogger_system_backend//jar", + "@com_google_guava_android//jar", + ], +) diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/CommonShaders.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/CommonShaders.java new file mode 100644 index 000000000..1e601bc9e --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/CommonShaders.java @@ -0,0 +1,81 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.glutil; + +import java.nio.FloatBuffer; + +/** Collection of common simple shaders and related resources. */ +public class CommonShaders { + /** + * Shader for rendering a simple geometry. + * + *

Keeps the vertices that are passed in, and applies a transformation to the texture (pass an + * identity matrix if not needed). The transformation uniform is there to support {@link + * android.graphics.SurfaceTexture#getTransformMatrix}. + */ + public static final String VERTEX_SHADER = + "uniform mat4 texture_transform;\n" + + "attribute vec4 position;\n" + + "attribute mediump vec4 texture_coordinate;\n" + + "varying mediump vec2 sample_coordinate;\n" + + "\n" + + "void main() {\n" + + " gl_Position = position;\n" + + " sample_coordinate = (texture_transform * texture_coordinate).xy;\n" + + "}"; + /** Simple fragment shader that renders a 2D texture. */ + public static final String FRAGMENT_SHADER = + "varying mediump vec2 sample_coordinate;\n" + + "uniform sampler2D video_frame;\n" + + "\n" + + "void main() {\n" + + " gl_FragColor = texture2D(video_frame, sample_coordinate);\n" + + "}"; + /** + * Simple fragment shader that renders a texture bound to the {@link + * android.opengl.GLES11Ext#GL_TEXTURE_EXTERNAL_OES} target. See {@link + * android.graphics.SurfaceTexture}. + */ + public static final String FRAGMENT_SHADER_EXTERNAL = + "#extension GL_OES_EGL_image_external : require\n" + + "varying mediump vec2 sample_coordinate;\n" + + "uniform samplerExternalOES video_frame;\n" + + "\n" + + "void main() {\n" + + " gl_FragColor = texture2D(video_frame, sample_coordinate);\n" + + "}"; + /** + * Vertices for a quad that fills the drawing area. + * + *

Can be used directly with {@link android.opengl.GLES10#glDrawArrays}. + */ + public static final FloatBuffer SQUARE_VERTICES = + ShaderUtil.floatBuffer( + -1.0f, -1.0f, // bottom left + 1.0f, -1.0f, // bottom right + -1.0f, 1.0f, // top left + 1.0f, 1.0f // top right + ); + /** + * Vertices for a quad that fills the drawing area, but rotated 90 degrees. + */ + public static final FloatBuffer ROTATED_SQUARE_VERTICES = + ShaderUtil.floatBuffer( + -1.0f, 1.0f, // top left + -1.0f, -1.0f, // bottom left + 1.0f, 1.0f, // top right + 1.0f, -1.0f // bottom right + ); +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/EglManager.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/EglManager.java new file mode 100644 index 000000000..bad7a610f --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/EglManager.java @@ -0,0 +1,408 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.mediapipe.glutil; + +import android.graphics.SurfaceTexture; +import android.opengl.EGL14; +import android.os.Build; +import android.util.Log; +import android.view.Surface; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import com.google.mediapipe.framework.Compat; +import java.util.HashMap; +import java.util.Map; +import javax.annotation.Nullable; +import javax.microedition.khronos.egl.EGL10; +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.egl.EGLContext; +import javax.microedition.khronos.egl.EGLDisplay; +import javax.microedition.khronos.egl.EGLSurface; + +/** + * Helper class for creating and managing an {@link EGLContext}. + * + *

Note: Since we want to support API level 16, we cannot rely on {@link android.opengl.EGL14}. + */ +public class EglManager { + private static final String TAG = "EglManager"; + + // These are missing from EGL10. + public static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098; + public static final int EGL_OPENGL_ES2_BIT = 0x4; + public static final int EGL_OPENGL_ES3_BIT_KHR = 0x00000040; + public static final int EGL_DRAW = 12377; + public static final int EGL_READ = 12378; + + public static final int EGL14_API_LEVEL = android.os.Build.VERSION_CODES.JELLY_BEAN_MR1; + + private EGL10 egl; + private EGLDisplay eglDisplay = EGL10.EGL_NO_DISPLAY; + private EGLConfig eglConfig = null; + private EGLContext eglContext = EGL10.EGL_NO_CONTEXT; + private int[] singleIntArray; // reuse this instead of recreating it each time + private int glVersion; + private long nativeEglContext = 0; + private android.opengl.EGLContext egl14Context = null; + + /** + * Creates an EglManager wrapping a new {@link EGLContext}. + * + * @param parentContext another EGL context with which to share data (e.g. textures); can be an + * {@link EGLContext} or an {@link android.opengl.EGLContext}; can be null. + */ + public EglManager(@Nullable Object parentContext) { + this(parentContext, null); + } + + /** + * Creates an EglManager wrapping a new {@link EGLContext}. + * + * @param parentContext another EGL context with which to share data (e.g. textures); can be an + * {@link EGLContext} or an {@link android.opengl.EGLContext}; can be null. + * @param additionalConfigAttributes a list of attributes for eglChooseConfig to be added to the + * default ones. 
+ */ + public EglManager(@Nullable Object parentContext, @Nullable int[] additionalConfigAttributes) { + singleIntArray = new int[1]; + egl = (EGL10) EGLContext.getEGL(); + eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); + if (eglDisplay == EGL10.EGL_NO_DISPLAY) { + throw new RuntimeException("eglGetDisplay failed"); + } + int[] version = new int[2]; + if (!egl.eglInitialize(eglDisplay, version)) { + throw new RuntimeException("eglInitialize failed"); + } + + EGLContext realParentContext; + if (parentContext == null) { + realParentContext = EGL10.EGL_NO_CONTEXT; + } else if (parentContext instanceof EGLContext) { + realParentContext = (EGLContext) parentContext; + } else if (Build.VERSION.SDK_INT >= EGL14_API_LEVEL + && parentContext instanceof android.opengl.EGLContext) { + if (parentContext == EGL14.EGL_NO_CONTEXT) { + realParentContext = EGL10.EGL_NO_CONTEXT; + } else { + realParentContext = egl10ContextFromEgl14Context((android.opengl.EGLContext) parentContext); + } + } else { + throw new RuntimeException("invalid parent context: " + parentContext); + } + + // Try to create an OpenGL ES 3 context first, then fall back on ES 2. + try { + createContext(realParentContext, 3, additionalConfigAttributes); + glVersion = 3; + } catch (RuntimeException e) { + Log.w(TAG, "could not create GLES 3 context: " + e); + createContext(realParentContext, 2, additionalConfigAttributes); + glVersion = 2; + } + } + + /** Returns the managed {@link EGLContext} */ + public EGLContext getContext() { + return eglContext; + } + + /** Returns the native handle to the context. 
*/ + public long getNativeContext() { + if (nativeEglContext == 0) { + grabContextVariants(); + } + return nativeEglContext; + } + + public android.opengl.EGLContext getEgl14Context() { + if (Build.VERSION.SDK_INT < EGL14_API_LEVEL) { + throw new RuntimeException("cannot use EGL14 on API level < 17"); + } + if (egl14Context == null) { + grabContextVariants(); + } + return egl14Context; + } + + public int getGlMajorVersion() { + return glVersion; + } + + /** Makes this the current EGL context on the current thread. */ + public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) { + if (!egl.eglMakeCurrent(eglDisplay, drawSurface, readSurface, eglContext)) { + throw new RuntimeException("eglMakeCurrent failed"); + } + } + + /** Makes no EGL context current on the current thread. */ + public void makeNothingCurrent() { + if (!egl.eglMakeCurrent( + eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) { + throw new RuntimeException("eglMakeCurrent failed"); + } + } + + /** + * Creates an {@link EGLSurface} for an Android Surface. + * + * @param surface can be a {@link Surface}, {@link SurfaceTexture}, {@link SurfaceHolder} or + * {@link SurfaceView}. + */ + public EGLSurface createWindowSurface(Object surface) { + if (!(surface instanceof Surface + || surface instanceof SurfaceTexture + || surface instanceof SurfaceHolder + || surface instanceof SurfaceView)) { + throw new RuntimeException("invalid surface: " + surface); + } + + // Create a window surface, and attach it to the Surface we received. + int[] surfaceAttribs = {EGL10.EGL_NONE}; + EGLSurface eglSurface = + egl.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs); + checkEglError("eglCreateWindowSurface"); + if (eglSurface == null) { + throw new RuntimeException("surface was null"); + } + return eglSurface; + } + + /** + * Creates an {@link EGLSurface} for offscreen rendering, not bound to any Android surface. + * + *

An EGLSurface is always required to make an EGLContext current, and it is bound to the + * OpenGL framebuffer by default. However, the framebuffer can then be bound to other objects in + * OpenGL, such as a texture. + *

If you want to use an EGLContext but do not really care about the EGLSurface, you can use + * a 1x1 surface created with this method. + */ + public EGLSurface createOffscreenSurface(int width, int height) { + int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE}; + EGLSurface eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs); + checkEglError("eglCreatePbufferSurface"); + if (eglSurface == null) { + throw new RuntimeException("surface was null"); + } + return eglSurface; + } + + /** Releases the resources held by this manager. */ + public void release() { + if (eglDisplay != EGL10.EGL_NO_DISPLAY) { + // Android is unusual in that it uses a reference-counted EGLDisplay. So for + // every eglInitialize() we need an eglTerminate(). + egl.eglMakeCurrent( + eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT); + egl.eglDestroyContext(eglDisplay, eglContext); + egl.eglTerminate(eglDisplay); + } + + eglDisplay = EGL10.EGL_NO_DISPLAY; + eglContext = EGL10.EGL_NO_CONTEXT; + eglConfig = null; + } + + /** Releases an {@link EGLSurface}. */ + public void releaseSurface(EGLSurface eglSurface) { + egl.eglDestroySurface(eglDisplay, eglSurface); + } + + private void createContext( + EGLContext parentContext, int glVersion, @Nullable int[] additionalConfigAttributes) { + eglConfig = getConfig(glVersion, additionalConfigAttributes); + if (eglConfig == null) { + throw new RuntimeException("Unable to find a suitable EGLConfig"); + } + // Try to create an OpenGL ES 3 context first. + int[] contextAttrs = {EGL_CONTEXT_CLIENT_VERSION, glVersion, EGL10.EGL_NONE}; + eglContext = egl.eglCreateContext(eglDisplay, eglConfig, parentContext, contextAttrs); + if (eglContext == null) { + int error = egl.eglGetError(); + throw new RuntimeException( + "Could not create GL context: EGL error: 0x" + + Integer.toHexString(error) + + (error == EGL10.EGL_BAD_CONTEXT + ? 
": parent context uses a different version of OpenGL" + : "")); + } + } + + /** + * Gets the native context handle for our context. The EGL10 API does not provide a way to do this + * directly, but we can make our context current and grab the current context handle from native + * code. Also gets the context as an {@link android.opengl.EGLContext}. The underlying native + * object is always the same, but the Android API has two different wrappers for it which are + * completely equivalent internally but completely separate at the Java level. + */ + private void grabContextVariants() { + EGLContext previousContext = egl.eglGetCurrentContext(); + EGLDisplay previousDisplay = egl.eglGetCurrentDisplay(); + EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL_DRAW); + EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL_READ); + EGLSurface tempEglSurface = null; + + if (previousContext != eglContext) { + tempEglSurface = createOffscreenSurface(1, 1); + makeCurrent(tempEglSurface, tempEglSurface); + } + + nativeEglContext = Compat.getCurrentNativeEGLContext(); + if (Build.VERSION.SDK_INT >= EGL14_API_LEVEL) { + egl14Context = android.opengl.EGL14.eglGetCurrentContext(); + } + + if (previousContext != eglContext) { + egl.eglMakeCurrent( + previousDisplay, previousDrawSurface, previousReadSurface, previousContext); + releaseSurface(tempEglSurface); + } + } + + private EGLContext egl10ContextFromEgl14Context(android.opengl.EGLContext context) { + android.opengl.EGLContext previousContext = EGL14.eglGetCurrentContext(); + android.opengl.EGLDisplay previousDisplay = EGL14.eglGetCurrentDisplay(); + android.opengl.EGLSurface previousDrawSurface = EGL14.eglGetCurrentSurface(EGL_DRAW); + android.opengl.EGLSurface previousReadSurface = EGL14.eglGetCurrentSurface(EGL_READ); + + android.opengl.EGLDisplay defaultDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); + android.opengl.EGLSurface tempEglSurface = null; + + if (!previousContext.equals(context)) { + 
int[] surfaceAttribs = {EGL14.EGL_WIDTH, 1, EGL14.EGL_HEIGHT, 1, EGL14.EGL_NONE}; + android.opengl.EGLConfig tempConfig = getThrowawayConfig(defaultDisplay); + tempEglSurface = + EGL14.eglCreatePbufferSurface(previousDisplay, tempConfig, surfaceAttribs, 0); + EGL14.eglMakeCurrent(defaultDisplay, tempEglSurface, tempEglSurface, context); + } + + EGLContext egl10Context = egl.eglGetCurrentContext(); + + if (!previousContext.equals(context)) { + EGL14.eglMakeCurrent( + previousDisplay, previousDrawSurface, previousReadSurface, previousContext); + EGL14.eglDestroySurface(defaultDisplay, tempEglSurface); + } + + return egl10Context; + } + + private android.opengl.EGLConfig getThrowawayConfig(android.opengl.EGLDisplay display) { + int[] attribList = { + EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT | EGL10.EGL_WINDOW_BIT, EGL10.EGL_NONE + }; + android.opengl.EGLConfig[] configs = new android.opengl.EGLConfig[1]; + int[] numConfigs = singleIntArray; + if (!EGL14.eglChooseConfig(display, attribList, 0, configs, 0, 1, numConfigs, 0)) { + throw new IllegalArgumentException("eglChooseConfig failed"); + } + if (numConfigs[0] <= 0) { + throw new IllegalArgumentException("No configs match requested attributes"); + } + return configs[0]; + } + + /** + * Merges two EGL attribute lists. The second list may be null. Values in the second list override + * those with the same key in the first list. 
+ */ + private int[] mergeAttribLists(int[] list1, @Nullable int[] list2) { + if (list2 == null) { + return list1; + } + HashMap attribMap = new HashMap<>(); + for (int[] list : new int[][] {list1, list2}) { + for (int i = 0; i < list.length / 2; i++) { + int key = list[2 * i]; + int value = list[2 * i + 1]; + if (key == EGL10.EGL_NONE) { + break; + } + attribMap.put(key, value); + } + } + int[] merged = new int[attribMap.size() * 2 + 1]; + int i = 0; + for (Map.Entry e : attribMap.entrySet()) { + merged[i++] = e.getKey(); + merged[i++] = e.getValue(); + } + merged[i] = EGL10.EGL_NONE; + return merged; + } + + private EGLConfig getConfig(int glVersion, @Nullable int[] additionalConfigAttributes) { + int[] baseAttribList = { + EGL10.EGL_RED_SIZE, 8, + EGL10.EGL_GREEN_SIZE, 8, + EGL10.EGL_BLUE_SIZE, 8, + EGL10.EGL_ALPHA_SIZE, 8, + EGL10.EGL_DEPTH_SIZE, 16, + EGL10.EGL_RENDERABLE_TYPE, glVersion == 3 ? EGL_OPENGL_ES3_BIT_KHR : EGL_OPENGL_ES2_BIT, + EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT | EGL10.EGL_WINDOW_BIT, + EGL10.EGL_NONE + }; + int[] attribList = mergeAttribLists(baseAttribList, additionalConfigAttributes); + // First count the matching configs. Note that eglChooseConfig will return configs that + // match *or exceed* the requirements, and will put the ones that exceed first! + int[] numConfigs = singleIntArray; + if (!egl.eglChooseConfig(eglDisplay, attribList, null, 0, numConfigs)) { + throw new IllegalArgumentException("eglChooseConfig failed"); + } + + if (numConfigs[0] <= 0) { + throw new IllegalArgumentException("No configs match requested attributes"); + } + + EGLConfig[] configs = new EGLConfig[numConfigs[0]]; + if (!egl.eglChooseConfig(eglDisplay, attribList, configs, configs.length, numConfigs)) { + throw new IllegalArgumentException("eglChooseConfig#2 failed"); + } + + // Try to find a config that matches our bit sizes exactly. 
+ EGLConfig bestConfig = null; + for (EGLConfig config : configs) { + int r = findConfigAttrib(config, EGL10.EGL_RED_SIZE, 0); + int g = findConfigAttrib(config, EGL10.EGL_GREEN_SIZE, 0); + int b = findConfigAttrib(config, EGL10.EGL_BLUE_SIZE, 0); + int a = findConfigAttrib(config, EGL10.EGL_ALPHA_SIZE, 0); + if ((r == 8) && (g == 8) && (b == 8) && (a == 8)) { + bestConfig = config; + break; + } + } + if (bestConfig == null) { + bestConfig = configs[0]; + } + + return bestConfig; + } + + private void checkEglError(String msg) { + int error; + if ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) { + throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error)); + } + } + + private int findConfigAttrib(EGLConfig config, int attribute, int defaultValue) { + if (egl.eglGetConfigAttrib(eglDisplay, config, attribute, singleIntArray)) { + return singleIntArray[0]; + } + return defaultValue; + } + +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/ExternalTextureRenderer.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/ExternalTextureRenderer.java new file mode 100644 index 000000000..e03bf409d --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/ExternalTextureRenderer.java @@ -0,0 +1,143 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.glutil; + +import android.graphics.SurfaceTexture; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import java.nio.FloatBuffer; +import java.util.HashMap; +import java.util.Map; + +/** + * Textures from {@link SurfaceTexture} are only supposed to be bound to target {@link + * GLES11Ext#GL_TEXTURE_EXTERNAL_OES}, which is accessed using samplerExternalOES in the shader. + * This means they cannot be used with a regular shader that expects a sampler2D. This class renders + * the external texture to the current framebuffer. By binding the framebuffer to a texture, this + * can be used to convert the input into a normal 2D texture. + */ +public class ExternalTextureRenderer { + private static final FloatBuffer TEXTURE_VERTICES = + ShaderUtil.floatBuffer( + 0.0f, 0.0f, // bottom left + 1.0f, 0.0f, // bottom right + 0.0f, 1.0f, // top left + 1.0f, 1.0f // top right + ); + + private static final FloatBuffer FLIPPED_TEXTURE_VERTICES = + ShaderUtil.floatBuffer( + 0.0f, 1.0f, // top left + 1.0f, 1.0f, // top right + 0.0f, 0.0f, // bottom left + 1.0f, 0.0f // bottom right + ); + + private static final String TAG = "ExternalTextureRend"; // Max length of a tag is 23. + private static final int ATTRIB_POSITION = 1; + private static final int ATTRIB_TEXTURE_COORDINATE = 2; + + private int program = 0; + private int frameUniform; + private int textureTransformUniform; + private float[] textureTransformMatrix = new float[16]; + private boolean flipY; + + /** Call this to setup the shader program before rendering. 
*/ + public void setup() { + Map attributeLocations = new HashMap<>(); + attributeLocations.put("position", ATTRIB_POSITION); + attributeLocations.put("texture_coordinate", ATTRIB_TEXTURE_COORDINATE); + program = + ShaderUtil.createProgram( + CommonShaders.VERTEX_SHADER, + CommonShaders.FRAGMENT_SHADER_EXTERNAL, + attributeLocations); + frameUniform = GLES20.glGetUniformLocation(program, "video_frame"); + textureTransformUniform = GLES20.glGetUniformLocation(program, "texture_transform"); + ShaderUtil.checkGlError("glGetUniformLocation"); + } + + /** + * Flips rendering output vertically, useful for conversion between coordinate systems with + * top-left v.s. bottom-left origins. Effective in subsequent {@link #render(SurfaceTexture)} + * calls. + */ + public void setFlipY(boolean flip) { + flipY = flip; + } + + /** + * Renders the surfaceTexture to the framebuffer with optional vertical flip. + * + *

Before calling this, {@link #setup} must have been called. + * + *

NOTE: Calls {@link SurfaceTexture#updateTexImage()} on passed surface texture. + */ + public void render(SurfaceTexture surfaceTexture) { + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + ShaderUtil.checkGlError("glActiveTexture"); + surfaceTexture.updateTexImage(); // This implicitly binds the texture. + surfaceTexture.getTransformMatrix(textureTransformMatrix); + GLES20.glTexParameteri( + GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri( + GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri( + GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri( + GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + ShaderUtil.checkGlError("glTexParameteri"); + + GLES20.glUseProgram(program); + ShaderUtil.checkGlError("glUseProgram"); + GLES20.glUniform1i(frameUniform, 0); + ShaderUtil.checkGlError("glUniform1i"); + GLES20.glUniformMatrix4fv(textureTransformUniform, 1, false, textureTransformMatrix, 0); + ShaderUtil.checkGlError("glUniformMatrix4fv"); + GLES20.glEnableVertexAttribArray(ATTRIB_POSITION); + GLES20.glVertexAttribPointer( + ATTRIB_POSITION, 2, GLES20.GL_FLOAT, false, 0, CommonShaders.SQUARE_VERTICES); + + GLES20.glEnableVertexAttribArray(ATTRIB_TEXTURE_COORDINATE); + GLES20.glVertexAttribPointer( + ATTRIB_TEXTURE_COORDINATE, + 2, + GLES20.GL_FLOAT, + false, + 0, + flipY ? FLIPPED_TEXTURE_VERTICES : TEXTURE_VERTICES); + ShaderUtil.checkGlError("program setup"); + + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + ShaderUtil.checkGlError("glDrawArrays"); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); + ShaderUtil.checkGlError("glBindTexture"); + + // TODO: add sync and go back to glFlush() + GLES20.glFinish(); + } + + /** + * Call this to delete the shader program. + * + *

This is only necessary if one wants to release the program while keeping the context around. + */ + public void release() { + GLES20.glDeleteProgram(program); + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/GlThread.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/GlThread.java new file mode 100644 index 000000000..f1985b5c5 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/GlThread.java @@ -0,0 +1,215 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.glutil; + +import android.opengl.GLES20; +import android.os.Handler; +import android.os.Looper; +import android.util.Log; +import javax.annotation.Nullable; +import javax.microedition.khronos.egl.EGLContext; +import javax.microedition.khronos.egl.EGLSurface; + +/** + * A thread that manages an OpenGL context. + * + *

A given context can only be used by a single thread at a time. Furthermore, on at least some + * Android devices, changing the current context is reported to take a long time (on the order of + * several ms). Therefore it is often convenient to have a dedicated thread for rendering to a + * context. + */ +public class GlThread extends Thread { + private static final String TAG = "GlThread"; + private static final String THREAD_NAME = "mediapipe.glutil.GlThread"; + + private boolean ready; + private final Object startLock = new Object(); + + protected volatile EglManager eglManager; + protected EGLSurface eglSurface = null; + protected Handler handler = null; // must be created on the thread itself + protected Looper looper = null; // must be created on the thread itself + protected int framebuffer = 0; + + /** + * Creates a GlThread. + * + * @param parentContext another EGL context with which to share data (e.g. textures); can be an + * {@link EGLContext} or an {@link android.opengl.EGLContext}; can be null. + */ + public GlThread(@Nullable Object parentContext) { + this(parentContext, null); + } + + /** + * Creates a GlThread. + * + * @param parentContext another EGL context with which to share data (e.g. textures); can be an + * {@link EGLContext} or an {@link android.opengl.EGLContext}; can be null. + * @param additionalConfigAttributes a list of attributes for eglChooseConfig to be added to the + * default ones. + */ + public GlThread(@Nullable Object parentContext, @Nullable int[] additionalConfigAttributes) { + eglManager = new EglManager(parentContext, additionalConfigAttributes); + setName(THREAD_NAME); + } + + /** + * Returns the Handler associated with this thread. + */ + public Handler getHandler() { + return handler; + } + + /** Returns the Looper associated with this thread. */ + public Looper getLooper() { + return looper; + } + + /** Returns the EglManager managing this thread's context. 
*/ + public EglManager getEglManager() { + return eglManager; + } + + /** + * Returns the EGLContext associated with this thread. Do not use it on another thread. + */ + public EGLContext getEGLContext() { + return eglManager.getContext(); + } + + /** + * Returns the framebuffer object used by this thread. + */ + public int getFramebuffer() { + return framebuffer; + } + + /** + * Binds a texture to the color attachment of the framebuffer. + */ + public void bindFramebuffer(int texture, int width, int height) { + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebuffer); + GLES20.glFramebufferTexture2D( + GLES20.GL_FRAMEBUFFER, + GLES20.GL_COLOR_ATTACHMENT0, + GLES20.GL_TEXTURE_2D, + texture, + 0); + int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER); + if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) { + throw new RuntimeException("Framebuffer not complete, status=" + status); + } + GLES20.glViewport(0, 0, width, height); + ShaderUtil.checkGlError("glViewport"); + } + + @Override + public void run() { + Looper.prepare(); + handler = createHandler(); + looper = Looper.myLooper(); + + Log.d(TAG, String.format("Starting GL thread %s", getName())); + + prepareGl(); + + synchronized (startLock) { + ready = true; + startLock.notify(); // signal waitUntilReady() + } + + Looper.loop(); + + looper = null; + + releaseGl(); + eglManager.release(); + + Log.d(TAG, String.format("Stopping GL thread %s", getName())); + + synchronized (startLock) { + ready = false; + } + } + + /** Terminates the thread, after processing all pending messages. */ + public boolean quitSafely() { + if (looper == null) { + return false; + } + looper.quitSafely(); + return true; + } + + /** + * Waits until the thread has finished setting up the handler and invoking {@link prepareGl}. + */ + public void waitUntilReady() throws InterruptedException { + // We wait in a loop to deal with spurious wakeups. 
However, we do not + // catch the InterruptedException, because we have no way of knowing what + // the application expects. On one hand, the called expects the thread to + // be ready when this method returns, which means we would have to keep + // looping. But on the other hand, if they interrupt the thread they may + // not want it to continue execution. We have no choice but to propagate + // the exception and let the caller make the decision. + synchronized (startLock) { + while (!ready) { + startLock.wait(); + } + } + } + + /** Sets up the OpenGL context. Can be overridden to set up additional resources. */ + public void prepareGl() { + eglSurface = createEglSurface(); + eglManager.makeCurrent(eglSurface, eglSurface); + + GLES20.glDisable(GLES20.GL_DEPTH_TEST); + GLES20.glDisable(GLES20.GL_CULL_FACE); + int[] values = new int[1]; + GLES20.glGenFramebuffers(1, values, 0); + framebuffer = values[0]; + } + + /** Releases the resources created in prepareGl. */ + public void releaseGl() { + if (framebuffer != 0) { + int[] values = new int[1]; + values[0] = framebuffer; + GLES20.glDeleteFramebuffers(1, values, 0); + framebuffer = 0; + } + + eglManager.makeNothingCurrent(); + if (eglSurface != null) { + eglManager.releaseSurface(eglSurface); + eglSurface = null; + } + } + + /** + * Factory method that creates the handler used by the thread. Can be overridden to use a custom + * {@link Handler}. + */ + protected Handler createHandler() { + return new Handler(); + } + + /** Factory method that creates the surface used by the thread. 
*/ + protected EGLSurface createEglSurface() { + return eglManager.createOffscreenSurface(1, 1); + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/ShaderUtil.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/ShaderUtil.java new file mode 100644 index 000000000..1cdaed041 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/ShaderUtil.java @@ -0,0 +1,178 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.glutil; + +import android.graphics.Bitmap; +import android.opengl.GLES20; +import android.opengl.GLUtils; +import com.google.common.flogger.FluentLogger; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; +import java.util.Map; +import javax.annotation.Nullable; + +/** + * Utility class for managing GLSL shaders. + */ +public class ShaderUtil { + private static final FluentLogger logger = FluentLogger.forEnclosingClass(); + + /** + * Loads a shader from source. + * @param shaderType a valid GL shader type, e.g. {@link GLES20#GL_VERTEX_SHADER} or + * {@link GLES20#GL_FRAGMENT_SHADER}. 
+ * @param source the shader's source in text form. + * @return a handle to the created shader, or 0 in case of error. + */ + public static int loadShader(int shaderType, String source) { + int shader = GLES20.glCreateShader(shaderType); + GLES20.glShaderSource(shader, source); + GLES20.glCompileShader(shader); + int[] compiled = new int[1]; + GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); + if (compiled[0] == 0) { + logger.atSevere().log("Could not compile shader %d: %s", shaderType, + GLES20.glGetShaderInfoLog(shader)); + GLES20.glDeleteShader(shader); + shader = 0; + } + return shader; + } + + /** + * Creates a shader program. + * + * @param vertexSource source of the vertex shader. + * @param fragmentSource source of the fragment shader. + * @param attributeLocations a map of desired locations for attributes. Can be null. + * @return a handle to the created program, or 0 in case of error. + */ + public static int createProgram( + String vertexSource, + String fragmentSource, + @Nullable Map attributeLocations) { + int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); + if (vertexShader == 0) { + return 0; + } + int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); + if (fragmentShader == 0) { + return 0; + } + + int program = GLES20.glCreateProgram(); + if (program == 0) { + logger.atSevere().log("Could not create program"); + } + GLES20.glAttachShader(program, vertexShader); + GLES20.glAttachShader(program, fragmentShader); + + if (attributeLocations != null) { + for (Map.Entry entry : attributeLocations.entrySet()) { + GLES20.glBindAttribLocation(program, entry.getValue(), entry.getKey()); + } + } + + GLES20.glLinkProgram(program); + int[] linkStatus = new int[1]; + GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); + if (linkStatus[0] != GLES20.GL_TRUE) { + logger.atSevere().log("Could not link program: %s", GLES20.glGetProgramInfoLog(program)); + GLES20.glDeleteProgram(program); 
+ program = 0; + } + return program; + } + + /** + * Creates a texture. Binds it to texture unit 0 to perform setup. + * @return the name of the new texture. + */ + public static int createRgbaTexture(int width, int height) { + final int[] textureName = new int[] {0}; + GLES20.glGenTextures(1, textureName, 0); + + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureName[0]); + GLES20.glTexImage2D( + GLES20.GL_TEXTURE_2D, + 0, + GLES20.GL_RGBA, + width, height, + 0, + GLES20.GL_RGBA, + GLES20.GL_UNSIGNED_BYTE, + null); + ShaderUtil.checkGlError("glTexImage2D"); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + ShaderUtil.checkGlError("texture setup"); + return textureName[0]; + } + + /** + * Creates a texture from a Bitmap. Binds it to texture unit 0 to perform setup. + * + * @return the name of the new texture. 
+ */ + public static int createRgbaTexture(Bitmap bitmap) { + final int[] textureName = new int[] {0}; + GLES20.glGenTextures(1, textureName, 0); + + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureName[0]); + GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); + ShaderUtil.checkGlError("texImage2D"); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + ShaderUtil.checkGlError("texture setup"); + return textureName[0]; + } + + /** + * Creates a {@link FloatBuffer} with the given arguments as contents. + * The buffer is created in native format for efficient use with OpenGL. + */ + public static FloatBuffer floatBuffer(float... values) { + ByteBuffer byteBuffer = + ByteBuffer.allocateDirect( + values.length * 4 /* sizeof(float) */); + // use the device hardware's native byte order + byteBuffer.order(ByteOrder.nativeOrder()); + + // create a floating point buffer from the ByteBuffer + FloatBuffer floatBuffer = byteBuffer.asFloatBuffer(); + // add the coordinates to the FloatBuffer + floatBuffer.put(values); + // set the buffer to read the first coordinate + floatBuffer.position(0); + return floatBuffer; + } + + /** + * Calls {@link GLES20#glGetError} and raises an exception if there was an error. 
+ */ + public static void checkGlError(String msg) { + int error = GLES20.glGetError(); + if (error != GLES20.GL_NO_ERROR) { + throw new RuntimeException(msg + ": GL error: 0x" + Integer.toHexString(error)); + } + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/TextureRenderer.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/TextureRenderer.java new file mode 100644 index 000000000..da785fd8a --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/glutil/TextureRenderer.java @@ -0,0 +1,105 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.glutil; + +import android.opengl.GLES20; +import android.opengl.Matrix; +import java.nio.FloatBuffer; +import java.util.HashMap; +import java.util.Map; + +/** Simple renderer for a normal 2D texture. 
*/ +public class TextureRenderer { + private static final FloatBuffer TEXTURE_VERTICES = + ShaderUtil.floatBuffer( + 0.0f, 0.0f, // bottom left + 1.0f, 0.0f, // bottom right + 0.0f, 1.0f, // top left + 1.0f, 1.0f // top right + ); + + private static final String TAG = "TextureRenderer"; + private static final int ATTRIB_POSITION = 1; + private static final int ATTRIB_TEXTURE_COORDINATE = 2; + + private int program = 0; + private int frameUniform; + private int textureTransformUniform; + private float[] textureTransformMatrix = new float[16]; + + /** Call this to setup the shader program before rendering. */ + public void setup() { + Map attributeLocations = new HashMap<>(); + attributeLocations.put("position", ATTRIB_POSITION); + attributeLocations.put("texture_coordinate", ATTRIB_TEXTURE_COORDINATE); + program = + ShaderUtil.createProgram( + CommonShaders.VERTEX_SHADER, CommonShaders.FRAGMENT_SHADER, attributeLocations); + frameUniform = GLES20.glGetUniformLocation(program, "video_frame"); + textureTransformUniform = GLES20.glGetUniformLocation(program, "texture_transform"); + ShaderUtil.checkGlError("glGetUniformLocation"); + Matrix.setIdentityM(textureTransformMatrix, 0 /* offset */); + } + + /** + * Renders a texture to the framebuffer. + * + *

Before calling this, {@link #setup} must have been called. + */ + public void render(int textureName) { + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + ShaderUtil.checkGlError("glActiveTexture"); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureName); + ShaderUtil.checkGlError("glBindTexture"); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + ShaderUtil.checkGlError("glTexParameteri"); + + GLES20.glUseProgram(program); + ShaderUtil.checkGlError("glUseProgram"); + GLES20.glUniform1i(frameUniform, 0); + ShaderUtil.checkGlError("glUniform1i"); + GLES20.glUniformMatrix4fv(textureTransformUniform, 1, false, textureTransformMatrix, 0); + ShaderUtil.checkGlError("glUniformMatrix4fv"); + GLES20.glEnableVertexAttribArray(ATTRIB_POSITION); + GLES20.glVertexAttribPointer( + ATTRIB_POSITION, 2, GLES20.GL_FLOAT, false, 0, CommonShaders.SQUARE_VERTICES); + + GLES20.glEnableVertexAttribArray(ATTRIB_TEXTURE_COORDINATE); + GLES20.glVertexAttribPointer( + ATTRIB_TEXTURE_COORDINATE, 2, GLES20.GL_FLOAT, false, 0, TEXTURE_VERTICES); + ShaderUtil.checkGlError("program setup"); + + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + ShaderUtil.checkGlError("glDrawArrays"); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); + ShaderUtil.checkGlError("glBindTexture"); + + GLES20.glFlush(); + } + + /** + * Call this to delete the shader program. + * + *

This is only necessary if one wants to release the program while keeping the context around. + */ + public void release() { + GLES20.glDeleteProgram(program); + } +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/AudioDecoder.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/AudioDecoder.java new file mode 100644 index 000000000..c21891d6a --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/AudioDecoder.java @@ -0,0 +1,2101 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: mediapipe/util/audio_decoder.proto + +package mediapipe; + +public final class AudioDecoder { + private AudioDecoder() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + registry.add(mediapipe.AudioDecoder.AudioDecoderOptions.ext); + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface AudioStreamOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.AudioStreamOptions) + com.google.protobuf.MessageOrBuilder { + + /** + *

+     * The stream to decode.  Stream indexes start from 0 (audio and video
+     * are handled separately).
+     * 
+ * + * optional int64 stream_index = 1 [default = 0]; + */ + boolean hasStreamIndex(); + /** + *
+     * The stream to decode.  Stream indexes start from 0 (audio and video
+     * are handled separately).
+     * 
+ * + * optional int64 stream_index = 1 [default = 0]; + */ + long getStreamIndex(); + + /** + *
+     * Process the file despite this stream not being present.
+     * 
+ * + * optional bool allow_missing = 2 [default = false]; + */ + boolean hasAllowMissing(); + /** + *
+     * Process the file despite this stream not being present.
+     * 
+ * + * optional bool allow_missing = 2 [default = false]; + */ + boolean getAllowMissing(); + + /** + *
+     * If true, failures to decode a frame of data will be ignored.
+     * 
+ * + * optional bool ignore_decode_failures = 3 [default = false]; + */ + boolean hasIgnoreDecodeFailures(); + /** + *
+     * If true, failures to decode a frame of data will be ignored.
+     * 
+ * + * optional bool ignore_decode_failures = 3 [default = false]; + */ + boolean getIgnoreDecodeFailures(); + + /** + *
+     * Output packets with regressing timestamps. By default those packets are
+     * dropped.
+     * 
+ * + * optional bool output_regressing_timestamps = 4 [default = false]; + */ + boolean hasOutputRegressingTimestamps(); + /** + *
+     * Output packets with regressing timestamps. By default those packets are
+     * dropped.
+     * 
+ * + * optional bool output_regressing_timestamps = 4 [default = false]; + */ + boolean getOutputRegressingTimestamps(); + + /** + *
+     * MPEG PTS timestamps roll over back to 0 after 26.5h. If this flag is set
+     * we detect any rollover and continue incrementing timestamps past this
+     * point. Set this flag if you want non-regressing timestamps for MPEG
+     * content where the PTS may roll over.
+     * 
+ * + * optional bool correct_pts_for_rollover = 5; + */ + boolean hasCorrectPtsForRollover(); + /** + *
+     * MPEG PTS timestamps roll over back to 0 after 26.5h. If this flag is set
+     * we detect any rollover and continue incrementing timestamps past this
+     * point. Set this flag if you want non-regressing timestamps for MPEG
+     * content where the PTS may roll over.
+     * 
+ * + * optional bool correct_pts_for_rollover = 5; + */ + boolean getCorrectPtsForRollover(); + } + /** + * Protobuf type {@code mediapipe.AudioStreamOptions} + */ + public static final class AudioStreamOptions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.AudioStreamOptions) + AudioStreamOptionsOrBuilder { + // Use AudioStreamOptions.newBuilder() to construct. + private AudioStreamOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private AudioStreamOptions() { + streamIndex_ = 0L; + allowMissing_ = false; + ignoreDecodeFailures_ = false; + outputRegressingTimestamps_ = false; + correctPtsForRollover_ = false; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AudioStreamOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + streamIndex_ = input.readInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + allowMissing_ = input.readBool(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + ignoreDecodeFailures_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + outputRegressingTimestamps_ = input.readBool(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + correctPtsForRollover_ = input.readBool(); + break; + } + } + } + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioStreamOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioStreamOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.AudioDecoder.AudioStreamOptions.class, mediapipe.AudioDecoder.AudioStreamOptions.Builder.class); + } + + private int bitField0_; + public static final int STREAM_INDEX_FIELD_NUMBER = 1; + private long streamIndex_; + /** + *
+     * The stream to decode.  Stream indexes start from 0 (audio and video
+     * are handled separately).
+     * 
+ * + * optional int64 stream_index = 1 [default = 0]; + */ + public boolean hasStreamIndex() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+     * The stream to decode.  Stream indexes start from 0 (audio and video
+     * are handled separately).
+     * 
+ * + * optional int64 stream_index = 1 [default = 0]; + */ + public long getStreamIndex() { + return streamIndex_; + } + + public static final int ALLOW_MISSING_FIELD_NUMBER = 2; + private boolean allowMissing_; + /** + *
+     * Process the file despite this stream not being present.
+     * 
+ * + * optional bool allow_missing = 2 [default = false]; + */ + public boolean hasAllowMissing() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+     * Process the file despite this stream not being present.
+     * 
+ * + * optional bool allow_missing = 2 [default = false]; + */ + public boolean getAllowMissing() { + return allowMissing_; + } + + public static final int IGNORE_DECODE_FAILURES_FIELD_NUMBER = 3; + private boolean ignoreDecodeFailures_; + /** + *
+     * If true, failures to decode a frame of data will be ignored.
+     * 
+ * + * optional bool ignore_decode_failures = 3 [default = false]; + */ + public boolean hasIgnoreDecodeFailures() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + *
+     * If true, failures to decode a frame of data will be ignored.
+     * 
+ * + * optional bool ignore_decode_failures = 3 [default = false]; + */ + public boolean getIgnoreDecodeFailures() { + return ignoreDecodeFailures_; + } + + public static final int OUTPUT_REGRESSING_TIMESTAMPS_FIELD_NUMBER = 4; + private boolean outputRegressingTimestamps_; + /** + *
+     * Output packets with regressing timestamps. By default those packets are
+     * dropped.
+     * 
+ * + * optional bool output_regressing_timestamps = 4 [default = false]; + */ + public boolean hasOutputRegressingTimestamps() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + *
+     * Output packets with regressing timestamps. By default those packets are
+     * dropped.
+     * 
+ * + * optional bool output_regressing_timestamps = 4 [default = false]; + */ + public boolean getOutputRegressingTimestamps() { + return outputRegressingTimestamps_; + } + + public static final int CORRECT_PTS_FOR_ROLLOVER_FIELD_NUMBER = 5; + private boolean correctPtsForRollover_; + /** + *
+     * MPEG PTS timestamps roll over back to 0 after 26.5h. If this flag is set
+     * we detect any rollover and continue incrementing timestamps past this
+     * point. Set this flag if you want non-regressing timestamps for MPEG
+     * content where the PTS may roll over.
+     * 
+ * + * optional bool correct_pts_for_rollover = 5; + */ + public boolean hasCorrectPtsForRollover() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + *
+     * MPEG PTS timestamps roll over back to 0 after 26.5h. If this flag is set
+     * we detect any rollover and continue incrementing timestamps past this
+     * point. Set this flag if you want non-regressing timestamps for MPEG
+     * content where the PTS may roll over.
+     * 
+ * + * optional bool correct_pts_for_rollover = 5; + */ + public boolean getCorrectPtsForRollover() { + return correctPtsForRollover_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, streamIndex_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, allowMissing_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBool(3, ignoreDecodeFailures_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBool(4, outputRegressingTimestamps_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, correctPtsForRollover_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, streamIndex_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, allowMissing_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, ignoreDecodeFailures_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, outputRegressingTimestamps_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, correctPtsForRollover_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long 
serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.AudioDecoder.AudioStreamOptions)) { + return super.equals(obj); + } + mediapipe.AudioDecoder.AudioStreamOptions other = (mediapipe.AudioDecoder.AudioStreamOptions) obj; + + boolean result = true; + result = result && (hasStreamIndex() == other.hasStreamIndex()); + if (hasStreamIndex()) { + result = result && (getStreamIndex() + == other.getStreamIndex()); + } + result = result && (hasAllowMissing() == other.hasAllowMissing()); + if (hasAllowMissing()) { + result = result && (getAllowMissing() + == other.getAllowMissing()); + } + result = result && (hasIgnoreDecodeFailures() == other.hasIgnoreDecodeFailures()); + if (hasIgnoreDecodeFailures()) { + result = result && (getIgnoreDecodeFailures() + == other.getIgnoreDecodeFailures()); + } + result = result && (hasOutputRegressingTimestamps() == other.hasOutputRegressingTimestamps()); + if (hasOutputRegressingTimestamps()) { + result = result && (getOutputRegressingTimestamps() + == other.getOutputRegressingTimestamps()); + } + result = result && (hasCorrectPtsForRollover() == other.hasCorrectPtsForRollover()); + if (hasCorrectPtsForRollover()) { + result = result && (getCorrectPtsForRollover() + == other.getCorrectPtsForRollover()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasStreamIndex()) { + hash = (37 * hash) + STREAM_INDEX_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStreamIndex()); + } + if (hasAllowMissing()) { + hash = (37 * hash) + ALLOW_MISSING_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getAllowMissing()); + } + if 
(hasIgnoreDecodeFailures()) { + hash = (37 * hash) + IGNORE_DECODE_FAILURES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIgnoreDecodeFailures()); + } + if (hasOutputRegressingTimestamps()) { + hash = (37 * hash) + OUTPUT_REGRESSING_TIMESTAMPS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getOutputRegressingTimestamps()); + } + if (hasCorrectPtsForRollover()) { + hash = (37 * hash) + CORRECT_PTS_FOR_ROLLOVER_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getCorrectPtsForRollover()); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.AudioDecoder.AudioStreamOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.AudioDecoder.AudioStreamOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.AudioDecoder.AudioStreamOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.AudioDecoder.AudioStreamOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.AudioDecoder.AudioStreamOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.AudioDecoder.AudioStreamOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.AudioDecoder.AudioStreamOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.AudioDecoder.AudioStreamOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.AudioDecoder.AudioStreamOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.AudioDecoder.AudioStreamOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.AudioDecoder.AudioStreamOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code mediapipe.AudioStreamOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.AudioStreamOptions) + mediapipe.AudioDecoder.AudioStreamOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioStreamOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioStreamOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.AudioDecoder.AudioStreamOptions.class, mediapipe.AudioDecoder.AudioStreamOptions.Builder.class); + } + + // Construct using mediapipe.AudioDecoder.AudioStreamOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + streamIndex_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + allowMissing_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + ignoreDecodeFailures_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + outputRegressingTimestamps_ = false; + bitField0_ = (bitField0_ & ~0x00000008); + correctPtsForRollover_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioStreamOptions_descriptor; + } + + public mediapipe.AudioDecoder.AudioStreamOptions getDefaultInstanceForType() { + return mediapipe.AudioDecoder.AudioStreamOptions.getDefaultInstance(); + } + + public mediapipe.AudioDecoder.AudioStreamOptions build() { + mediapipe.AudioDecoder.AudioStreamOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.AudioDecoder.AudioStreamOptions buildPartial() { + mediapipe.AudioDecoder.AudioStreamOptions result = new mediapipe.AudioDecoder.AudioStreamOptions(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.streamIndex_ = streamIndex_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.allowMissing_ = allowMissing_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.ignoreDecodeFailures_ = ignoreDecodeFailures_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.outputRegressingTimestamps_ = outputRegressingTimestamps_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.correctPtsForRollover_ = correctPtsForRollover_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return 
(Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.AudioDecoder.AudioStreamOptions) { + return mergeFrom((mediapipe.AudioDecoder.AudioStreamOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.AudioDecoder.AudioStreamOptions other) { + if (other == mediapipe.AudioDecoder.AudioStreamOptions.getDefaultInstance()) return this; + if (other.hasStreamIndex()) { + setStreamIndex(other.getStreamIndex()); + } + if (other.hasAllowMissing()) { + setAllowMissing(other.getAllowMissing()); + } + if (other.hasIgnoreDecodeFailures()) { + setIgnoreDecodeFailures(other.getIgnoreDecodeFailures()); + } + if (other.hasOutputRegressingTimestamps()) { + setOutputRegressingTimestamps(other.getOutputRegressingTimestamps()); + } + if (other.hasCorrectPtsForRollover()) { + setCorrectPtsForRollover(other.getCorrectPtsForRollover()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.AudioDecoder.AudioStreamOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.AudioDecoder.AudioStreamOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if 
(parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private long streamIndex_ ; + /** + *
+       * The stream to decode.  Stream indexes start from 0 (audio and video
+       * are handled separately).
+       * 
+ * + * optional int64 stream_index = 1 [default = 0]; + */ + public boolean hasStreamIndex() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+       * The stream to decode.  Stream indexes start from 0 (audio and video
+       * are handled separately).
+       * 
+ * + * optional int64 stream_index = 1 [default = 0]; + */ + public long getStreamIndex() { + return streamIndex_; + } + /** + *
+       * The stream to decode.  Stream indexes start from 0 (audio and video
+       * are handled separately).
+       * 
+ * + * optional int64 stream_index = 1 [default = 0]; + */ + public Builder setStreamIndex(long value) { + bitField0_ |= 0x00000001; + streamIndex_ = value; + onChanged(); + return this; + } + /** + *
+       * The stream to decode.  Stream indexes start from 0 (audio and video
+       * are handled separately).
+       * 
+ * + * optional int64 stream_index = 1 [default = 0]; + */ + public Builder clearStreamIndex() { + bitField0_ = (bitField0_ & ~0x00000001); + streamIndex_ = 0L; + onChanged(); + return this; + } + + private boolean allowMissing_ ; + /** + *
+       * Process the file despite this stream not being present.
+       * 
+ * + * optional bool allow_missing = 2 [default = false]; + */ + public boolean hasAllowMissing() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+       * Process the file despite this stream not being present.
+       * 
+ * + * optional bool allow_missing = 2 [default = false]; + */ + public boolean getAllowMissing() { + return allowMissing_; + } + /** + *
+       * Process the file despite this stream not being present.
+       * 
+ * + * optional bool allow_missing = 2 [default = false]; + */ + public Builder setAllowMissing(boolean value) { + bitField0_ |= 0x00000002; + allowMissing_ = value; + onChanged(); + return this; + } + /** + *
+       * Process the file despite this stream not being present.
+       * 
+ * + * optional bool allow_missing = 2 [default = false]; + */ + public Builder clearAllowMissing() { + bitField0_ = (bitField0_ & ~0x00000002); + allowMissing_ = false; + onChanged(); + return this; + } + + private boolean ignoreDecodeFailures_ ; + /** + *
+       * If true, failures to decode a frame of data will be ignored.
+       * 
+ * + * optional bool ignore_decode_failures = 3 [default = false]; + */ + public boolean hasIgnoreDecodeFailures() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + *
+       * If true, failures to decode a frame of data will be ignored.
+       * 
+ * + * optional bool ignore_decode_failures = 3 [default = false]; + */ + public boolean getIgnoreDecodeFailures() { + return ignoreDecodeFailures_; + } + /** + *
+       * If true, failures to decode a frame of data will be ignored.
+       * 
+ * + * optional bool ignore_decode_failures = 3 [default = false]; + */ + public Builder setIgnoreDecodeFailures(boolean value) { + bitField0_ |= 0x00000004; + ignoreDecodeFailures_ = value; + onChanged(); + return this; + } + /** + *
+       * If true, failures to decode a frame of data will be ignored.
+       * 
+ * + * optional bool ignore_decode_failures = 3 [default = false]; + */ + public Builder clearIgnoreDecodeFailures() { + bitField0_ = (bitField0_ & ~0x00000004); + ignoreDecodeFailures_ = false; + onChanged(); + return this; + } + + private boolean outputRegressingTimestamps_ ; + /** + *
+       * Output packets with regressing timestamps. By default those packets are
+       * dropped.
+       * 
+ * + * optional bool output_regressing_timestamps = 4 [default = false]; + */ + public boolean hasOutputRegressingTimestamps() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + *
+       * Output packets with regressing timestamps. By default those packets are
+       * dropped.
+       * 
+ * + * optional bool output_regressing_timestamps = 4 [default = false]; + */ + public boolean getOutputRegressingTimestamps() { + return outputRegressingTimestamps_; + } + /** + *
+       * Output packets with regressing timestamps. By default those packets are
+       * dropped.
+       * 
+ * + * optional bool output_regressing_timestamps = 4 [default = false]; + */ + public Builder setOutputRegressingTimestamps(boolean value) { + bitField0_ |= 0x00000008; + outputRegressingTimestamps_ = value; + onChanged(); + return this; + } + /** + *
+       * Output packets with regressing timestamps. By default those packets are
+       * dropped.
+       * 
+ * + * optional bool output_regressing_timestamps = 4 [default = false]; + */ + public Builder clearOutputRegressingTimestamps() { + bitField0_ = (bitField0_ & ~0x00000008); + outputRegressingTimestamps_ = false; + onChanged(); + return this; + } + + private boolean correctPtsForRollover_ ; + /** + *
+       * MPEG PTS timestamps roll over back to 0 after 26.5h. If this flag is set
+       * we detect any rollover and continue incrementing timestamps past this
+       * point. Set this flag if you want non-regressing timestamps for MPEG
+       * content where the PTS may roll over.
+       * 
+ * + * optional bool correct_pts_for_rollover = 5; + */ + public boolean hasCorrectPtsForRollover() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + *
+       * MPEG PTS timestamps roll over back to 0 after 26.5h. If this flag is set
+       * we detect any rollover and continue incrementing timestamps past this
+       * point. Set this flag if you want non-regressing timestamps for MPEG
+       * content where the PTS may roll over.
+       * 
+ * + * optional bool correct_pts_for_rollover = 5; + */ + public boolean getCorrectPtsForRollover() { + return correctPtsForRollover_; + } + /** + *
+       * MPEG PTS timestamps roll over back to 0 after 26.5h. If this flag is set
+       * we detect any rollover and continue incrementing timestamps past this
+       * point. Set this flag if you want non-regressing timestamps for MPEG
+       * content where the PTS may roll over.
+       * 
+ * + * optional bool correct_pts_for_rollover = 5; + */ + public Builder setCorrectPtsForRollover(boolean value) { + bitField0_ |= 0x00000010; + correctPtsForRollover_ = value; + onChanged(); + return this; + } + /** + *
+       * MPEG PTS timestamps roll over back to 0 after 26.5h. If this flag is set
+       * we detect any rollover and continue incrementing timestamps past this
+       * point. Set this flag if you want non-regressing timestamps for MPEG
+       * content where the PTS may roll over.
+       * 
+ * + * optional bool correct_pts_for_rollover = 5; + */ + public Builder clearCorrectPtsForRollover() { + bitField0_ = (bitField0_ & ~0x00000010); + correctPtsForRollover_ = false; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.AudioStreamOptions) + } + + // @@protoc_insertion_point(class_scope:mediapipe.AudioStreamOptions) + private static final mediapipe.AudioDecoder.AudioStreamOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.AudioDecoder.AudioStreamOptions(); + } + + public static mediapipe.AudioDecoder.AudioStreamOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AudioStreamOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AudioStreamOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.AudioDecoder.AudioStreamOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface AudioDecoderOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.AudioDecoderOptions) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + java.util.List + getAudioStreamList(); + /** + * repeated 
.mediapipe.AudioStreamOptions audio_stream = 1; + */ + mediapipe.AudioDecoder.AudioStreamOptions getAudioStream(int index); + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + int getAudioStreamCount(); + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + java.util.List + getAudioStreamOrBuilderList(); + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + mediapipe.AudioDecoder.AudioStreamOptionsOrBuilder getAudioStreamOrBuilder( + int index); + + /** + *
+     * The start time in seconds to decode.
+     * 
+ * + * optional double start_time = 2; + */ + boolean hasStartTime(); + /** + *
+     * The start time in seconds to decode.
+     * 
+ * + * optional double start_time = 2; + */ + double getStartTime(); + + /** + *
+     * The end time in seconds to decode (inclusive).
+     * 
+ * + * optional double end_time = 3; + */ + boolean hasEndTime(); + /** + *
+     * The end time in seconds to decode (inclusive).
+     * 
+ * + * optional double end_time = 3; + */ + double getEndTime(); + } + /** + * Protobuf type {@code mediapipe.AudioDecoderOptions} + */ + public static final class AudioDecoderOptions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.AudioDecoderOptions) + AudioDecoderOptionsOrBuilder { + // Use AudioDecoderOptions.newBuilder() to construct. + private AudioDecoderOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private AudioDecoderOptions() { + audioStream_ = java.util.Collections.emptyList(); + startTime_ = 0D; + endTime_ = 0D; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AudioDecoderOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + audioStream_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + audioStream_.add( + input.readMessage(mediapipe.AudioDecoder.AudioStreamOptions.PARSER, extensionRegistry)); + break; + } + case 17: { + bitField0_ |= 0x00000001; + startTime_ = input.readDouble(); + break; + } + case 25: { + bitField0_ |= 0x00000002; + endTime_ = input.readDouble(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + 
throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + audioStream_ = java.util.Collections.unmodifiableList(audioStream_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioDecoderOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioDecoderOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.AudioDecoder.AudioDecoderOptions.class, mediapipe.AudioDecoder.AudioDecoderOptions.Builder.class); + } + + private int bitField0_; + public static final int AUDIO_STREAM_FIELD_NUMBER = 1; + private java.util.List audioStream_; + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public java.util.List getAudioStreamList() { + return audioStream_; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public java.util.List + getAudioStreamOrBuilderList() { + return audioStream_; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public int getAudioStreamCount() { + return audioStream_.size(); + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public mediapipe.AudioDecoder.AudioStreamOptions getAudioStream(int index) { + return audioStream_.get(index); + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public mediapipe.AudioDecoder.AudioStreamOptionsOrBuilder getAudioStreamOrBuilder( + int index) { + return audioStream_.get(index); + } + + public static final int START_TIME_FIELD_NUMBER = 2; + private double startTime_; + /** + *
+     * The start time in seconds to decode.
+     * 
+ * + * optional double start_time = 2; + */ + public boolean hasStartTime() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+     * The start time in seconds to decode.
+     * 
+ * + * optional double start_time = 2; + */ + public double getStartTime() { + return startTime_; + } + + public static final int END_TIME_FIELD_NUMBER = 3; + private double endTime_; + /** + *
+     * The end time in seconds to decode (inclusive).
+     * 
+ * + * optional double end_time = 3; + */ + public boolean hasEndTime() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+     * The end time in seconds to decode (inclusive).
+     * 
+ * + * optional double end_time = 3; + */ + public double getEndTime() { + return endTime_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < audioStream_.size(); i++) { + output.writeMessage(1, audioStream_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeDouble(2, startTime_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeDouble(3, endTime_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < audioStream_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, audioStream_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(2, startTime_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(3, endTime_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.AudioDecoder.AudioDecoderOptions)) { + return super.equals(obj); + } + mediapipe.AudioDecoder.AudioDecoderOptions other = (mediapipe.AudioDecoder.AudioDecoderOptions) obj; + + boolean result = true; + result = result && getAudioStreamList() + .equals(other.getAudioStreamList()); + result = result && (hasStartTime() == other.hasStartTime()); + if (hasStartTime()) { + result = result && ( + 
java.lang.Double.doubleToLongBits(getStartTime()) + == java.lang.Double.doubleToLongBits( + other.getStartTime())); + } + result = result && (hasEndTime() == other.hasEndTime()); + if (hasEndTime()) { + result = result && ( + java.lang.Double.doubleToLongBits(getEndTime()) + == java.lang.Double.doubleToLongBits( + other.getEndTime())); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getAudioStreamCount() > 0) { + hash = (37 * hash) + AUDIO_STREAM_FIELD_NUMBER; + hash = (53 * hash) + getAudioStreamList().hashCode(); + } + if (hasStartTime()) { + hash = (37 * hash) + START_TIME_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getStartTime())); + } + if (hasEndTime()) { + hash = (37 * hash) + END_TIME_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getEndTime())); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.AudioDecoder.AudioDecoderOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.AudioDecoder.AudioDecoderOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.AudioDecoder.AudioDecoderOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.AudioDecoder.AudioDecoderOptions parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.AudioDecoder.AudioDecoderOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.AudioDecoder.AudioDecoderOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.AudioDecoder.AudioDecoderOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.AudioDecoder.AudioDecoderOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.AudioDecoder.AudioDecoderOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.AudioDecoder.AudioDecoderOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(mediapipe.AudioDecoder.AudioDecoderOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code mediapipe.AudioDecoderOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.AudioDecoderOptions) + mediapipe.AudioDecoder.AudioDecoderOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioDecoderOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioDecoderOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.AudioDecoder.AudioDecoderOptions.class, mediapipe.AudioDecoder.AudioDecoderOptions.Builder.class); + } + + // Construct using mediapipe.AudioDecoder.AudioDecoderOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getAudioStreamFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (audioStreamBuilder_ == null) { + audioStream_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + audioStreamBuilder_.clear(); + } + startTime_ = 0D; + bitField0_ = 
(bitField0_ & ~0x00000002); + endTime_ = 0D; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.AudioDecoder.internal_static_mediapipe_AudioDecoderOptions_descriptor; + } + + public mediapipe.AudioDecoder.AudioDecoderOptions getDefaultInstanceForType() { + return mediapipe.AudioDecoder.AudioDecoderOptions.getDefaultInstance(); + } + + public mediapipe.AudioDecoder.AudioDecoderOptions build() { + mediapipe.AudioDecoder.AudioDecoderOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.AudioDecoder.AudioDecoderOptions buildPartial() { + mediapipe.AudioDecoder.AudioDecoderOptions result = new mediapipe.AudioDecoder.AudioDecoderOptions(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (audioStreamBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + audioStream_ = java.util.Collections.unmodifiableList(audioStream_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.audioStream_ = audioStream_; + } else { + result.audioStream_ = audioStreamBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.startTime_ = startTime_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + result.endTime_ = endTime_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) 
{ + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.AudioDecoder.AudioDecoderOptions) { + return mergeFrom((mediapipe.AudioDecoder.AudioDecoderOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.AudioDecoder.AudioDecoderOptions other) { + if (other == mediapipe.AudioDecoder.AudioDecoderOptions.getDefaultInstance()) return this; + if (audioStreamBuilder_ == null) { + if (!other.audioStream_.isEmpty()) { + if (audioStream_.isEmpty()) { + audioStream_ = other.audioStream_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureAudioStreamIsMutable(); + audioStream_.addAll(other.audioStream_); + } + onChanged(); + } + } else { + if (!other.audioStream_.isEmpty()) { + if (audioStreamBuilder_.isEmpty()) { + audioStreamBuilder_.dispose(); + audioStreamBuilder_ = null; + audioStream_ = other.audioStream_; + bitField0_ = (bitField0_ & ~0x00000001); + audioStreamBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getAudioStreamFieldBuilder() : null; + } else { + audioStreamBuilder_.addAllMessages(other.audioStream_); + } + } + } + if (other.hasStartTime()) { + setStartTime(other.getStartTime()); + } + if (other.hasEndTime()) { + setEndTime(other.getEndTime()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.AudioDecoder.AudioDecoderOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.AudioDecoder.AudioDecoderOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List audioStream_ = + java.util.Collections.emptyList(); + private void ensureAudioStreamIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + audioStream_ = new java.util.ArrayList(audioStream_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + mediapipe.AudioDecoder.AudioStreamOptions, mediapipe.AudioDecoder.AudioStreamOptions.Builder, mediapipe.AudioDecoder.AudioStreamOptionsOrBuilder> audioStreamBuilder_; + + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public java.util.List getAudioStreamList() { + if (audioStreamBuilder_ == null) { + return java.util.Collections.unmodifiableList(audioStream_); + } else { + return audioStreamBuilder_.getMessageList(); + } + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public int getAudioStreamCount() { + if (audioStreamBuilder_ == null) { + return audioStream_.size(); + } else { + 
return audioStreamBuilder_.getCount(); + } + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public mediapipe.AudioDecoder.AudioStreamOptions getAudioStream(int index) { + if (audioStreamBuilder_ == null) { + return audioStream_.get(index); + } else { + return audioStreamBuilder_.getMessage(index); + } + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public Builder setAudioStream( + int index, mediapipe.AudioDecoder.AudioStreamOptions value) { + if (audioStreamBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAudioStreamIsMutable(); + audioStream_.set(index, value); + onChanged(); + } else { + audioStreamBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public Builder setAudioStream( + int index, mediapipe.AudioDecoder.AudioStreamOptions.Builder builderForValue) { + if (audioStreamBuilder_ == null) { + ensureAudioStreamIsMutable(); + audioStream_.set(index, builderForValue.build()); + onChanged(); + } else { + audioStreamBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public Builder addAudioStream(mediapipe.AudioDecoder.AudioStreamOptions value) { + if (audioStreamBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAudioStreamIsMutable(); + audioStream_.add(value); + onChanged(); + } else { + audioStreamBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public Builder addAudioStream( + int index, mediapipe.AudioDecoder.AudioStreamOptions value) { + if (audioStreamBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAudioStreamIsMutable(); + audioStream_.add(index, value); + onChanged(); + } else { + audioStreamBuilder_.addMessage(index, 
value); + } + return this; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public Builder addAudioStream( + mediapipe.AudioDecoder.AudioStreamOptions.Builder builderForValue) { + if (audioStreamBuilder_ == null) { + ensureAudioStreamIsMutable(); + audioStream_.add(builderForValue.build()); + onChanged(); + } else { + audioStreamBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public Builder addAudioStream( + int index, mediapipe.AudioDecoder.AudioStreamOptions.Builder builderForValue) { + if (audioStreamBuilder_ == null) { + ensureAudioStreamIsMutable(); + audioStream_.add(index, builderForValue.build()); + onChanged(); + } else { + audioStreamBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public Builder addAllAudioStream( + java.lang.Iterable values) { + if (audioStreamBuilder_ == null) { + ensureAudioStreamIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, audioStream_); + onChanged(); + } else { + audioStreamBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public Builder clearAudioStream() { + if (audioStreamBuilder_ == null) { + audioStream_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + audioStreamBuilder_.clear(); + } + return this; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public Builder removeAudioStream(int index) { + if (audioStreamBuilder_ == null) { + ensureAudioStreamIsMutable(); + audioStream_.remove(index); + onChanged(); + } else { + audioStreamBuilder_.remove(index); + } + return this; + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public mediapipe.AudioDecoder.AudioStreamOptions.Builder 
getAudioStreamBuilder( + int index) { + return getAudioStreamFieldBuilder().getBuilder(index); + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public mediapipe.AudioDecoder.AudioStreamOptionsOrBuilder getAudioStreamOrBuilder( + int index) { + if (audioStreamBuilder_ == null) { + return audioStream_.get(index); } else { + return audioStreamBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public java.util.List + getAudioStreamOrBuilderList() { + if (audioStreamBuilder_ != null) { + return audioStreamBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(audioStream_); + } + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public mediapipe.AudioDecoder.AudioStreamOptions.Builder addAudioStreamBuilder() { + return getAudioStreamFieldBuilder().addBuilder( + mediapipe.AudioDecoder.AudioStreamOptions.getDefaultInstance()); + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public mediapipe.AudioDecoder.AudioStreamOptions.Builder addAudioStreamBuilder( + int index) { + return getAudioStreamFieldBuilder().addBuilder( + index, mediapipe.AudioDecoder.AudioStreamOptions.getDefaultInstance()); + } + /** + * repeated .mediapipe.AudioStreamOptions audio_stream = 1; + */ + public java.util.List + getAudioStreamBuilderList() { + return getAudioStreamFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + mediapipe.AudioDecoder.AudioStreamOptions, mediapipe.AudioDecoder.AudioStreamOptions.Builder, mediapipe.AudioDecoder.AudioStreamOptionsOrBuilder> + getAudioStreamFieldBuilder() { + if (audioStreamBuilder_ == null) { + audioStreamBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + mediapipe.AudioDecoder.AudioStreamOptions, mediapipe.AudioDecoder.AudioStreamOptions.Builder, mediapipe.AudioDecoder.AudioStreamOptionsOrBuilder>( + audioStream_, + 
((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + audioStream_ = null; + } + return audioStreamBuilder_; + } + + private double startTime_ ; + /** + *
+       * The start time in seconds to decode.
+       * 
+ * + * optional double start_time = 2; + */ + public boolean hasStartTime() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+       * The start time in seconds to decode.
+       * 
+ * + * optional double start_time = 2; + */ + public double getStartTime() { + return startTime_; + } + /** + *
+       * The start time in seconds to decode.
+       * 
+ * + * optional double start_time = 2; + */ + public Builder setStartTime(double value) { + bitField0_ |= 0x00000002; + startTime_ = value; + onChanged(); + return this; + } + /** + *
+       * The start time in seconds to decode.
+       * 
+ * + * optional double start_time = 2; + */ + public Builder clearStartTime() { + bitField0_ = (bitField0_ & ~0x00000002); + startTime_ = 0D; + onChanged(); + return this; + } + + private double endTime_ ; + /** + *
+       * The end time in seconds to decode (inclusive).
+       * 
+ * + * optional double end_time = 3; + */ + public boolean hasEndTime() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + *
+       * The end time in seconds to decode (inclusive).
+       * 
+ * + * optional double end_time = 3; + */ + public double getEndTime() { + return endTime_; + } + /** + *
+       * The end time in seconds to decode (inclusive).
+       * 
+ * + * optional double end_time = 3; + */ + public Builder setEndTime(double value) { + bitField0_ |= 0x00000004; + endTime_ = value; + onChanged(); + return this; + } + /** + *
+       * The end time in seconds to decode (inclusive).
+       * 
+ * + * optional double end_time = 3; + */ + public Builder clearEndTime() { + bitField0_ = (bitField0_ & ~0x00000004); + endTime_ = 0D; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.AudioDecoderOptions) + } + + // @@protoc_insertion_point(class_scope:mediapipe.AudioDecoderOptions) + private static final mediapipe.AudioDecoder.AudioDecoderOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.AudioDecoder.AudioDecoderOptions(); + } + + public static mediapipe.AudioDecoder.AudioDecoderOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AudioDecoderOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AudioDecoderOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.AudioDecoder.AudioDecoderOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + public static final int EXT_FIELD_NUMBER = 263370674; + /** + * extend .mediapipe.CalculatorOptions { ... 
} + */ + public static final + com.google.protobuf.GeneratedMessage.GeneratedExtension< + com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions, + mediapipe.AudioDecoder.AudioDecoderOptions> ext = com.google.protobuf.GeneratedMessage + .newMessageScopedGeneratedExtension( + mediapipe.AudioDecoder.AudioDecoderOptions.getDefaultInstance(), + 0, + mediapipe.AudioDecoder.AudioDecoderOptions.class, + mediapipe.AudioDecoder.AudioDecoderOptions.getDefaultInstance()); + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_AudioStreamOptions_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_AudioStreamOptions_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_AudioDecoderOptions_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_AudioDecoderOptions_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\"mediapipe/util/audio_decoder.proto\022\tme" + + "diapipe\032$mediapipe/framework/calculator." 
+ + "proto\"\301\001\n\022AudioStreamOptions\022\027\n\014stream_i" + + "ndex\030\001 \001(\003:\0010\022\034\n\rallow_missing\030\002 \001(\010:\005fa" + + "lse\022%\n\026ignore_decode_failures\030\003 \001(\010:\005fal" + + "se\022+\n\034output_regressing_timestamps\030\004 \001(\010" + + ":\005false\022 \n\030correct_pts_for_rollover\030\005 \001(" + + "\010\"\276\001\n\023AudioDecoderOptions\0223\n\014audio_strea" + + "m\030\001 \003(\0132\035.mediapipe.AudioStreamOptions\022\022" + + "\n\nstart_time\030\002 \001(\001\022\020\n\010end_time\030\003 \001(\0012L\n\003", + "ext\022\034.mediapipe.CalculatorOptions\030\262\357\312} \001" + + "(\0132\036.mediapipe.AudioDecoderOptions" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.mediapipe.proto.CalculatorProto.getDescriptor(), + }, assigner); + internal_static_mediapipe_AudioStreamOptions_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_mediapipe_AudioStreamOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_AudioStreamOptions_descriptor, + new java.lang.String[] { "StreamIndex", "AllowMissing", "IgnoreDecodeFailures", "OutputRegressingTimestamps", "CorrectPtsForRollover", }); + internal_static_mediapipe_AudioDecoderOptions_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_mediapipe_AudioDecoderOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_mediapipe_AudioDecoderOptions_descriptor, + new java.lang.String[] { "AudioStream", "StartTime", "EndTime", }); + com.google.mediapipe.proto.CalculatorProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/ColorOuterClass.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/ColorOuterClass.java new file mode 100644 index 000000000..a5fc6f132 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/ColorOuterClass.java @@ -0,0 +1,1409 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: mediapipe/util/color.proto + +package mediapipe; + +public final class ColorOuterClass { + private ColorOuterClass() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface ColorOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.Color) + com.google.protobuf.MessageOrBuilder { + + /** + * optional int32 r = 1; + */ + boolean hasR(); + /** + * optional int32 r = 1; + */ + int getR(); + + /** + * optional int32 g = 2; + */ + boolean hasG(); + /** + * optional int32 g = 2; + */ + int getG(); + + /** + * optional int32 b = 3; + */ + boolean hasB(); + /** + * optional int32 b = 3; + */ + int getB(); + } + /** + * Protobuf type {@code mediapipe.Color} + */ + public static final class Color extends + com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:mediapipe.Color) + ColorOrBuilder { + // Use Color.newBuilder() to construct. + private Color(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Color() { + r_ = 0; + g_ = 0; + b_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Color( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + r_ = input.readInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + g_ = input.readInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + b_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.ColorOuterClass.internal_static_mediapipe_Color_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.ColorOuterClass.internal_static_mediapipe_Color_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
mediapipe.ColorOuterClass.Color.class, mediapipe.ColorOuterClass.Color.Builder.class); + } + + private int bitField0_; + public static final int R_FIELD_NUMBER = 1; + private int r_; + /** + * optional int32 r = 1; + */ + public boolean hasR() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int32 r = 1; + */ + public int getR() { + return r_; + } + + public static final int G_FIELD_NUMBER = 2; + private int g_; + /** + * optional int32 g = 2; + */ + public boolean hasG() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int32 g = 2; + */ + public int getG() { + return g_; + } + + public static final int B_FIELD_NUMBER = 3; + private int b_; + /** + * optional int32 b = 3; + */ + public boolean hasB() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional int32 b = 3; + */ + public int getB() { + return b_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt32(1, r_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt32(2, g_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeInt32(3, b_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, r_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, g_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += 
com.google.protobuf.CodedOutputStream + .computeInt32Size(3, b_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.ColorOuterClass.Color)) { + return super.equals(obj); + } + mediapipe.ColorOuterClass.Color other = (mediapipe.ColorOuterClass.Color) obj; + + boolean result = true; + result = result && (hasR() == other.hasR()); + if (hasR()) { + result = result && (getR() + == other.getR()); + } + result = result && (hasG() == other.hasG()); + if (hasG()) { + result = result && (getG() + == other.getG()); + } + result = result && (hasB() == other.hasB()); + if (hasB()) { + result = result && (getB() + == other.getB()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasR()) { + hash = (37 * hash) + R_FIELD_NUMBER; + hash = (53 * hash) + getR(); + } + if (hasG()) { + hash = (37 * hash) + G_FIELD_NUMBER; + hash = (53 * hash) + getG(); + } + if (hasB()) { + hash = (37 * hash) + B_FIELD_NUMBER; + hash = (53 * hash) + getB(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.ColorOuterClass.Color parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.ColorOuterClass.Color parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public 
static mediapipe.ColorOuterClass.Color parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.ColorOuterClass.Color parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.ColorOuterClass.Color parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.ColorOuterClass.Color parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.ColorOuterClass.Color parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.ColorOuterClass.Color parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.ColorOuterClass.Color parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.ColorOuterClass.Color parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + 
public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.ColorOuterClass.Color prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code mediapipe.Color} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.Color) + mediapipe.ColorOuterClass.ColorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.ColorOuterClass.internal_static_mediapipe_Color_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.ColorOuterClass.internal_static_mediapipe_Color_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.ColorOuterClass.Color.class, mediapipe.ColorOuterClass.Color.Builder.class); + } + + // Construct using mediapipe.ColorOuterClass.Color.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + r_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + g_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + b_ = 0; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public 
com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.ColorOuterClass.internal_static_mediapipe_Color_descriptor; + } + + public mediapipe.ColorOuterClass.Color getDefaultInstanceForType() { + return mediapipe.ColorOuterClass.Color.getDefaultInstance(); + } + + public mediapipe.ColorOuterClass.Color build() { + mediapipe.ColorOuterClass.Color result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.ColorOuterClass.Color buildPartial() { + mediapipe.ColorOuterClass.Color result = new mediapipe.ColorOuterClass.Color(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.r_ = r_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.g_ = g_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.b_ = b_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder 
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.ColorOuterClass.Color) { + return mergeFrom((mediapipe.ColorOuterClass.Color)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.ColorOuterClass.Color other) { + if (other == mediapipe.ColorOuterClass.Color.getDefaultInstance()) return this; + if (other.hasR()) { + setR(other.getR()); + } + if (other.hasG()) { + setG(other.getG()); + } + if (other.hasB()) { + setB(other.getB()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.ColorOuterClass.Color parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.ColorOuterClass.Color) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private int r_ ; + /** + * optional int32 r = 1; + */ + public boolean hasR() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int32 r = 1; + */ + public int getR() { + return r_; + } + /** + * optional int32 r = 1; + */ + public Builder setR(int value) { + bitField0_ |= 0x00000001; + r_ = value; + onChanged(); + return this; + } + /** + * optional int32 r = 1; + */ + public Builder clearR() { + bitField0_ = (bitField0_ & ~0x00000001); + r_ = 0; + onChanged(); + return this; + } + + private int g_ ; + /** + * optional int32 g = 2; + */ + public boolean hasG() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int32 g = 2; + */ + public int getG() { + return g_; + 
} + /** + * optional int32 g = 2; + */ + public Builder setG(int value) { + bitField0_ |= 0x00000002; + g_ = value; + onChanged(); + return this; + } + /** + * optional int32 g = 2; + */ + public Builder clearG() { + bitField0_ = (bitField0_ & ~0x00000002); + g_ = 0; + onChanged(); + return this; + } + + private int b_ ; + /** + * optional int32 b = 3; + */ + public boolean hasB() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional int32 b = 3; + */ + public int getB() { + return b_; + } + /** + * optional int32 b = 3; + */ + public Builder setB(int value) { + bitField0_ |= 0x00000004; + b_ = value; + onChanged(); + return this; + } + /** + * optional int32 b = 3; + */ + public Builder clearB() { + bitField0_ = (bitField0_ & ~0x00000004); + b_ = 0; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.Color) + } + + // @@protoc_insertion_point(class_scope:mediapipe.Color) + private static final mediapipe.ColorOuterClass.Color DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.ColorOuterClass.Color(); + } + + public static mediapipe.ColorOuterClass.Color getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Color parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Color(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + 
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.ColorOuterClass.Color getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ColorMapOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.ColorMap) + com.google.protobuf.MessageOrBuilder { + + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + int getLabelToColorCount(); + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + boolean containsLabelToColor( + java.lang.String key); + /** + * Use {@link #getLabelToColorMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getLabelToColor(); + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + java.util.Map + getLabelToColorMap(); + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + mediapipe.ColorOuterClass.Color getLabelToColorOrDefault( + java.lang.String key, + mediapipe.ColorOuterClass.Color defaultValue); + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + mediapipe.ColorOuterClass.Color getLabelToColorOrThrow( + java.lang.String key); + } + /** + *
+   * Mapping from string label to a color.
+   * 
+ * + * Protobuf type {@code mediapipe.ColorMap} + */ + public static final class ColorMap extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.ColorMap) + ColorMapOrBuilder { + // Use ColorMap.newBuilder() to construct. + private ColorMap(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ColorMap() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ColorMap( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + labelToColor_ = com.google.protobuf.MapField.newMapField( + LabelToColorDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000001; + } + com.google.protobuf.MapEntry + labelToColor = input.readMessage( + LabelToColorDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + labelToColor_.getMutableMap().put(labelToColor.getKey(), labelToColor.getValue()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.ColorOuterClass.internal_static_mediapipe_ColorMap_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetLabelToColor(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.ColorOuterClass.internal_static_mediapipe_ColorMap_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.ColorOuterClass.ColorMap.class, mediapipe.ColorOuterClass.ColorMap.Builder.class); + } + + public static final int LABEL_TO_COLOR_FIELD_NUMBER = 1; + private static final class LabelToColorDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, mediapipe.ColorOuterClass.Color> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + mediapipe.ColorOuterClass.internal_static_mediapipe_ColorMap_LabelToColorEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.MESSAGE, + mediapipe.ColorOuterClass.Color.getDefaultInstance()); + } + private com.google.protobuf.MapField< + java.lang.String, mediapipe.ColorOuterClass.Color> labelToColor_; + private com.google.protobuf.MapField + internalGetLabelToColor() { + if (labelToColor_ == null) { + return com.google.protobuf.MapField.emptyMapField( + LabelToColorDefaultEntryHolder.defaultEntry); + } + return labelToColor_; + } + + public int getLabelToColorCount() { + return internalGetLabelToColor().getMap().size(); + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public boolean containsLabelToColor( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return 
internalGetLabelToColor().getMap().containsKey(key); + } + /** + * Use {@link #getLabelToColorMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getLabelToColor() { + return getLabelToColorMap(); + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public java.util.Map getLabelToColorMap() { + return internalGetLabelToColor().getMap(); + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public mediapipe.ColorOuterClass.Color getLabelToColorOrDefault( + java.lang.String key, + mediapipe.ColorOuterClass.Color defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabelToColor().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public mediapipe.ColorOuterClass.Color getLabelToColorOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabelToColor().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (java.util.Map.Entry entry + : internalGetLabelToColor().getMap().entrySet()) { + com.google.protobuf.MapEntry + labelToColor = LabelToColorDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + output.writeMessage(1, labelToColor); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for 
(java.util.Map.Entry entry + : internalGetLabelToColor().getMap().entrySet()) { + com.google.protobuf.MapEntry + labelToColor = LabelToColorDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, labelToColor); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.ColorOuterClass.ColorMap)) { + return super.equals(obj); + } + mediapipe.ColorOuterClass.ColorMap other = (mediapipe.ColorOuterClass.ColorMap) obj; + + boolean result = true; + result = result && internalGetLabelToColor().equals( + other.internalGetLabelToColor()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (!internalGetLabelToColor().getMap().isEmpty()) { + hash = (37 * hash) + LABEL_TO_COLOR_FIELD_NUMBER; + hash = (53 * hash) + internalGetLabelToColor().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.ColorOuterClass.ColorMap parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.ColorOuterClass.ColorMap parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.ColorOuterClass.ColorMap parseFrom(byte[] 
data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.ColorOuterClass.ColorMap parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.ColorOuterClass.ColorMap parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.ColorOuterClass.ColorMap parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.ColorOuterClass.ColorMap parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.ColorOuterClass.ColorMap parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.ColorOuterClass.ColorMap parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.ColorOuterClass.ColorMap parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { 
return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.ColorOuterClass.ColorMap prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * Mapping from string label to a color.
+     * 
+ * + * Protobuf type {@code mediapipe.ColorMap} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.ColorMap) + mediapipe.ColorOuterClass.ColorMapOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.ColorOuterClass.internal_static_mediapipe_ColorMap_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetLabelToColor(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 1: + return internalGetMutableLabelToColor(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.ColorOuterClass.internal_static_mediapipe_ColorMap_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.ColorOuterClass.ColorMap.class, mediapipe.ColorOuterClass.ColorMap.Builder.class); + } + + // Construct using mediapipe.ColorOuterClass.ColorMap.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + internalGetMutableLabelToColor().clear(); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
mediapipe.ColorOuterClass.internal_static_mediapipe_ColorMap_descriptor; + } + + public mediapipe.ColorOuterClass.ColorMap getDefaultInstanceForType() { + return mediapipe.ColorOuterClass.ColorMap.getDefaultInstance(); + } + + public mediapipe.ColorOuterClass.ColorMap build() { + mediapipe.ColorOuterClass.ColorMap result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.ColorOuterClass.ColorMap buildPartial() { + mediapipe.ColorOuterClass.ColorMap result = new mediapipe.ColorOuterClass.ColorMap(this); + int from_bitField0_ = bitField0_; + result.labelToColor_ = internalGetLabelToColor(); + result.labelToColor_.makeImmutable(); + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.ColorOuterClass.ColorMap) { + return mergeFrom((mediapipe.ColorOuterClass.ColorMap)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.ColorOuterClass.ColorMap other) { + if (other == 
mediapipe.ColorOuterClass.ColorMap.getDefaultInstance()) return this; + internalGetMutableLabelToColor().mergeFrom( + other.internalGetLabelToColor()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.ColorOuterClass.ColorMap parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.ColorOuterClass.ColorMap) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private com.google.protobuf.MapField< + java.lang.String, mediapipe.ColorOuterClass.Color> labelToColor_; + private com.google.protobuf.MapField + internalGetLabelToColor() { + if (labelToColor_ == null) { + return com.google.protobuf.MapField.emptyMapField( + LabelToColorDefaultEntryHolder.defaultEntry); + } + return labelToColor_; + } + private com.google.protobuf.MapField + internalGetMutableLabelToColor() { + onChanged();; + if (labelToColor_ == null) { + labelToColor_ = com.google.protobuf.MapField.newMapField( + LabelToColorDefaultEntryHolder.defaultEntry); + } + if (!labelToColor_.isMutable()) { + labelToColor_ = labelToColor_.copy(); + } + return labelToColor_; + } + + public int getLabelToColorCount() { + return internalGetLabelToColor().getMap().size(); + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public boolean containsLabelToColor( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetLabelToColor().getMap().containsKey(key); + } + /** + * Use {@link #getLabelToColorMap()} 
instead. + */ + @java.lang.Deprecated + public java.util.Map getLabelToColor() { + return getLabelToColorMap(); + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public java.util.Map getLabelToColorMap() { + return internalGetLabelToColor().getMap(); + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public mediapipe.ColorOuterClass.Color getLabelToColorOrDefault( + java.lang.String key, + mediapipe.ColorOuterClass.Color defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabelToColor().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public mediapipe.ColorOuterClass.Color getLabelToColorOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabelToColor().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearLabelToColor() { + getMutableLabelToColor().clear(); + return this; + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public Builder removeLabelToColor( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + getMutableLabelToColor().remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. 
+ */ + @java.lang.Deprecated + public java.util.Map + getMutableLabelToColor() { + return internalGetMutableLabelToColor().getMutableMap(); + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + public Builder putLabelToColor( + java.lang.String key, + mediapipe.ColorOuterClass.Color value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + getMutableLabelToColor().put(key, value); + return this; + } + /** + * map<string, .mediapipe.Color> label_to_color = 1; + */ + + public Builder putAllLabelToColor( + java.util.Map values) { + getMutableLabelToColor().putAll(values); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.ColorMap) + } + + // @@protoc_insertion_point(class_scope:mediapipe.ColorMap) + private static final mediapipe.ColorOuterClass.ColorMap DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.ColorOuterClass.ColorMap(); + } + + public static mediapipe.ColorOuterClass.ColorMap getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ColorMap parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ColorMap(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
mediapipe.ColorOuterClass.ColorMap getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_Color_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_Color_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_ColorMap_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_ColorMap_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_ColorMap_LabelToColorEntry_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_ColorMap_LabelToColorEntry_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\032mediapipe/util/color.proto\022\tmediapipe\"" + + "(\n\005Color\022\t\n\001r\030\001 \001(\005\022\t\n\001g\030\002 \001(\005\022\t\n\001b\030\003 \001(" + + "\005\"\220\001\n\010ColorMap\022=\n\016label_to_color\030\001 \003(\0132%" + + ".mediapipe.ColorMap.LabelToColorEntry\032E\n" + + "\021LabelToColorEntry\022\013\n\003key\030\001 \001(\t\022\037\n\005value" + + "\030\002 \001(\0132\020.mediapipe.Color:\0028\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + internal_static_mediapipe_Color_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_mediapipe_Color_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_Color_descriptor, + new java.lang.String[] { "R", "G", "B", }); + internal_static_mediapipe_ColorMap_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_mediapipe_ColorMap_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_ColorMap_descriptor, + new java.lang.String[] { "LabelToColor", }); + internal_static_mediapipe_ColorMap_LabelToColorEntry_descriptor = + internal_static_mediapipe_ColorMap_descriptor.getNestedTypes().get(0); + internal_static_mediapipe_ColorMap_LabelToColorEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_ColorMap_LabelToColorEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/CopyCalculator.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/CopyCalculator.java new file mode 100644 index 000000000..447084877 --- /dev/null +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/CopyCalculator.java @@ -0,0 +1,668 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: mediapipe/gpu/copy_calculator.proto + +package mediapipe; + +public final class CopyCalculator { + private CopyCalculator() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + registry.add(mediapipe.CopyCalculator.CopyCalculatorOptions.ext); + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface CopyCalculatorOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.CopyCalculatorOptions) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE]; + */ + boolean hasRotation(); + /** + * optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE]; + */ + mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation getRotation(); + } + /** + * Protobuf type {@code mediapipe.CopyCalculatorOptions} + */ + public static final class CopyCalculatorOptions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.CopyCalculatorOptions) + CopyCalculatorOptionsOrBuilder { + // Use CopyCalculatorOptions.newBuilder() to construct. 
+ private CopyCalculatorOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CopyCalculatorOptions() { + rotation_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CopyCalculatorOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation value = mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + rotation_ = rawValue; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + mediapipe.CopyCalculator.CopyCalculatorOptions.class, mediapipe.CopyCalculator.CopyCalculatorOptions.Builder.class); + } + + /** + * Protobuf enum {@code mediapipe.CopyCalculatorOptions.Rotation} + */ + public enum Rotation + implements com.google.protobuf.ProtocolMessageEnum { + /** + * NONE = 0; + */ + NONE(0), + /** + *
+       * rotate 90 degrees counterclockwise
+       * 
+ * + * CCW = 1; + */ + CCW(1), + /** + *
+       * hack to rectify convfloat
+       * 
+ * + * CCW_FLIP = 2; + */ + CCW_FLIP(2), + ; + + /** + * NONE = 0; + */ + public static final int NONE_VALUE = 0; + /** + *
+       * rotate 90 degrees counterclockwise
+       * 
+ * + * CCW = 1; + */ + public static final int CCW_VALUE = 1; + /** + *
+       * hack to rectify convfloat
+       * 
+ * + * CCW_FLIP = 2; + */ + public static final int CCW_FLIP_VALUE = 2; + + + public final int getNumber() { + return value; + } + + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Rotation valueOf(int value) { + return forNumber(value); + } + + public static Rotation forNumber(int value) { + switch (value) { + case 0: return NONE; + case 1: return CCW; + case 2: return CCW_FLIP; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + Rotation> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Rotation findValueByNumber(int number) { + return Rotation.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return mediapipe.CopyCalculator.CopyCalculatorOptions.getDescriptor().getEnumTypes().get(0); + } + + private static final Rotation[] VALUES = values(); + + public static Rotation valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Rotation(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:mediapipe.CopyCalculatorOptions.Rotation) + } + + private int bitField0_; + public static final int ROTATION_FIELD_NUMBER = 1; + private int rotation_; + /** + * optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE]; + 
*/ + public boolean hasRotation() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE]; + */ + public mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation getRotation() { + mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation result = mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.valueOf(rotation_); + return result == null ? mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.NONE : result; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, rotation_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, rotation_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.CopyCalculator.CopyCalculatorOptions)) { + return super.equals(obj); + } + mediapipe.CopyCalculator.CopyCalculatorOptions other = (mediapipe.CopyCalculator.CopyCalculatorOptions) obj; + + boolean result = true; + result = result && (hasRotation() == other.hasRotation()); + if (hasRotation()) { + result = result && rotation_ == other.rotation_; + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + 
public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRotation()) { + hash = (37 * hash) + ROTATION_FIELD_NUMBER; + hash = (53 * hash) + rotation_; + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + 
return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.CopyCalculator.CopyCalculatorOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code mediapipe.CopyCalculatorOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.CopyCalculatorOptions) + mediapipe.CopyCalculator.CopyCalculatorOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.CopyCalculator.CopyCalculatorOptions.class, mediapipe.CopyCalculator.CopyCalculatorOptions.Builder.class); + } + + // Construct using mediapipe.CopyCalculator.CopyCalculatorOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + rotation_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_descriptor; + } + + public mediapipe.CopyCalculator.CopyCalculatorOptions getDefaultInstanceForType() { + return mediapipe.CopyCalculator.CopyCalculatorOptions.getDefaultInstance(); 
+ } + + public mediapipe.CopyCalculator.CopyCalculatorOptions build() { + mediapipe.CopyCalculator.CopyCalculatorOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.CopyCalculator.CopyCalculatorOptions buildPartial() { + mediapipe.CopyCalculator.CopyCalculatorOptions result = new mediapipe.CopyCalculator.CopyCalculatorOptions(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.rotation_ = rotation_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.CopyCalculator.CopyCalculatorOptions) { + return mergeFrom((mediapipe.CopyCalculator.CopyCalculatorOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.CopyCalculator.CopyCalculatorOptions other) { + if (other == mediapipe.CopyCalculator.CopyCalculatorOptions.getDefaultInstance()) 
return this; + if (other.hasRotation()) { + setRotation(other.getRotation()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.CopyCalculator.CopyCalculatorOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.CopyCalculator.CopyCalculatorOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private int rotation_ = 0; + /** + * optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE]; + */ + public boolean hasRotation() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE]; + */ + public mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation getRotation() { + mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation result = mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.valueOf(rotation_); + return result == null ? 
mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.NONE : result; + } + /** + * optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE]; + */ + public Builder setRotation(mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + rotation_ = value.getNumber(); + onChanged(); + return this; + } + /** + * optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE]; + */ + public Builder clearRotation() { + bitField0_ = (bitField0_ & ~0x00000001); + rotation_ = 0; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.CopyCalculatorOptions) + } + + // @@protoc_insertion_point(class_scope:mediapipe.CopyCalculatorOptions) + private static final mediapipe.CopyCalculator.CopyCalculatorOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.CopyCalculator.CopyCalculatorOptions(); + } + + public static mediapipe.CopyCalculator.CopyCalculatorOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public CopyCalculatorOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CopyCalculatorOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { 
+ return PARSER; + } + + public mediapipe.CopyCalculator.CopyCalculatorOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + public static final int EXT_FIELD_NUMBER = 161773876; + /** + * extend .mediapipe.CalculatorOptions { ... } + */ + public static final + com.google.protobuf.GeneratedMessage.GeneratedExtension< + com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions, + mediapipe.CopyCalculator.CopyCalculatorOptions> ext = com.google.protobuf.GeneratedMessage + .newMessageScopedGeneratedExtension( + mediapipe.CopyCalculator.CopyCalculatorOptions.getDefaultInstance(), + 0, + mediapipe.CopyCalculator.CopyCalculatorOptions.class, + mediapipe.CopyCalculator.CopyCalculatorOptions.getDefaultInstance()); + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_CopyCalculatorOptions_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_CopyCalculatorOptions_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n#mediapipe/gpu/copy_calculator.proto\022\tm" + + "ediapipe\032$mediapipe/framework/calculator" + + ".proto\"\327\001\n\025CopyCalculatorOptions\022A\n\010rota" + + "tion\030\001 \001(\0162).mediapipe.CopyCalculatorOpt" + + "ions.Rotation:\004NONE\"+\n\010Rotation\022\010\n\004NONE\020" + + "\000\022\007\n\003CCW\020\001\022\014\n\010CCW_FLIP\020\0022N\n\003ext\022\034.mediap" + + "ipe.CalculatorOptions\030\264\362\221M \001(\0132 .mediapi" + + "pe.CopyCalculatorOptions" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.mediapipe.proto.CalculatorProto.getDescriptor(), + }, assigner); + internal_static_mediapipe_CopyCalculatorOptions_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_mediapipe_CopyCalculatorOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_CopyCalculatorOptions_descriptor, + new java.lang.String[] { "Rotation", }); + com.google.mediapipe.proto.CalculatorProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/GlContextOptionsOuterClass.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/GlContextOptionsOuterClass.java new file mode 100644 index 000000000..f154a3900 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/GlContextOptionsOuterClass.java @@ -0,0 +1,619 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: mediapipe/gpu/gl_context_options.proto + +package mediapipe; + +public final class GlContextOptionsOuterClass { + private GlContextOptionsOuterClass() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + registry.add(mediapipe.GlContextOptionsOuterClass.GlContextOptions.ext); + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface GlContextOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.GlContextOptions) + com.google.protobuf.MessageOrBuilder { + + /** + * optional string gl_context_name = 1; + */ + boolean hasGlContextName(); + /** + * optional string gl_context_name = 1; + */ + java.lang.String getGlContextName(); + /** + * optional string gl_context_name = 1; + */ + com.google.protobuf.ByteString + getGlContextNameBytes(); + } + /** + * Protobuf type {@code mediapipe.GlContextOptions} + */ + public static final class GlContextOptions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.GlContextOptions) + GlContextOptionsOrBuilder { + // Use GlContextOptions.newBuilder() to construct. 
+ private GlContextOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GlContextOptions() { + glContextName_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GlContextOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000001; + glContextName_ = bs; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.GlContextOptionsOuterClass.GlContextOptions.class, mediapipe.GlContextOptionsOuterClass.GlContextOptions.Builder.class); + } + + private int bitField0_; + public static 
final int GL_CONTEXT_NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object glContextName_; + /** + * optional string gl_context_name = 1; + */ + public boolean hasGlContextName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string gl_context_name = 1; + */ + public java.lang.String getGlContextName() { + java.lang.Object ref = glContextName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + glContextName_ = s; + } + return s; + } + } + /** + * optional string gl_context_name = 1; + */ + public com.google.protobuf.ByteString + getGlContextNameBytes() { + java.lang.Object ref = glContextName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + glContextName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, glContextName_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, glContextName_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + 
@java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.GlContextOptionsOuterClass.GlContextOptions)) { + return super.equals(obj); + } + mediapipe.GlContextOptionsOuterClass.GlContextOptions other = (mediapipe.GlContextOptionsOuterClass.GlContextOptions) obj; + + boolean result = true; + result = result && (hasGlContextName() == other.hasGlContextName()); + if (hasGlContextName()) { + result = result && getGlContextName() + .equals(other.getGlContextName()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasGlContextName()) { + hash = (37 * hash) + GL_CONTEXT_NAME_FIELD_NUMBER; + hash = (53 * hash) + getGlContextName().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { 
+ return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.GlContextOptionsOuterClass.GlContextOptions 
prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code mediapipe.GlContextOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.GlContextOptions) + mediapipe.GlContextOptionsOuterClass.GlContextOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.GlContextOptionsOuterClass.GlContextOptions.class, mediapipe.GlContextOptionsOuterClass.GlContextOptions.Builder.class); + } + + // Construct using mediapipe.GlContextOptionsOuterClass.GlContextOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + glContextName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_descriptor; + } + + public mediapipe.GlContextOptionsOuterClass.GlContextOptions getDefaultInstanceForType() { + return mediapipe.GlContextOptionsOuterClass.GlContextOptions.getDefaultInstance(); + } + + public mediapipe.GlContextOptionsOuterClass.GlContextOptions build() { + mediapipe.GlContextOptionsOuterClass.GlContextOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.GlContextOptionsOuterClass.GlContextOptions buildPartial() { + mediapipe.GlContextOptionsOuterClass.GlContextOptions result = new mediapipe.GlContextOptionsOuterClass.GlContextOptions(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.glContextName_ = glContextName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof 
mediapipe.GlContextOptionsOuterClass.GlContextOptions) { + return mergeFrom((mediapipe.GlContextOptionsOuterClass.GlContextOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.GlContextOptionsOuterClass.GlContextOptions other) { + if (other == mediapipe.GlContextOptionsOuterClass.GlContextOptions.getDefaultInstance()) return this; + if (other.hasGlContextName()) { + bitField0_ |= 0x00000001; + glContextName_ = other.glContextName_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.GlContextOptionsOuterClass.GlContextOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.GlContextOptionsOuterClass.GlContextOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object glContextName_ = ""; + /** + * optional string gl_context_name = 1; + */ + public boolean hasGlContextName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string gl_context_name = 1; + */ + public java.lang.String getGlContextName() { + java.lang.Object ref = glContextName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + glContextName_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string gl_context_name = 1; + */ + public 
com.google.protobuf.ByteString + getGlContextNameBytes() { + java.lang.Object ref = glContextName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + glContextName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string gl_context_name = 1; + */ + public Builder setGlContextName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + glContextName_ = value; + onChanged(); + return this; + } + /** + * optional string gl_context_name = 1; + */ + public Builder clearGlContextName() { + bitField0_ = (bitField0_ & ~0x00000001); + glContextName_ = getDefaultInstance().getGlContextName(); + onChanged(); + return this; + } + /** + * optional string gl_context_name = 1; + */ + public Builder setGlContextNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + glContextName_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.GlContextOptions) + } + + // @@protoc_insertion_point(class_scope:mediapipe.GlContextOptions) + private static final mediapipe.GlContextOptionsOuterClass.GlContextOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.GlContextOptionsOuterClass.GlContextOptions(); + } + + public static mediapipe.GlContextOptionsOuterClass.GlContextOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public GlContextOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GlContextOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.GlContextOptionsOuterClass.GlContextOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + public static final int EXT_FIELD_NUMBER = 222332034; + /** + * extend .mediapipe.CalculatorOptions { ... } + */ + public static final + com.google.protobuf.GeneratedMessage.GeneratedExtension< + com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions, + mediapipe.GlContextOptionsOuterClass.GlContextOptions> ext = com.google.protobuf.GeneratedMessage + .newMessageScopedGeneratedExtension( + mediapipe.GlContextOptionsOuterClass.GlContextOptions.getDefaultInstance(), + 0, + mediapipe.GlContextOptionsOuterClass.GlContextOptions.class, + mediapipe.GlContextOptionsOuterClass.GlContextOptions.getDefaultInstance()); + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_GlContextOptions_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_GlContextOptions_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n&mediapipe/gpu/gl_context_options.proto" + + "\022\tmediapipe\032$mediapipe/framework/calcula" + + "tor.proto\"v\n\020GlContextOptions\022\027\n\017gl_cont" + + "ext_name\030\001 
\001(\t2I\n\003ext\022\034.mediapipe.Calcul" + + "atorOptions\030\202\211\202j \001(\0132\033.mediapipe.GlConte" + + "xtOptions" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.mediapipe.proto.CalculatorProto.getDescriptor(), + }, assigner); + internal_static_mediapipe_GlContextOptions_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_mediapipe_GlContextOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_GlContextOptions_descriptor, + new java.lang.String[] { "GlContextName", }); + com.google.mediapipe.proto.CalculatorProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/PacketFactory.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/PacketFactory.java new file mode 100644 index 000000000..d748fb9e3 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/PacketFactory.java @@ -0,0 +1,2564 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: mediapipe/framework/packet_factory.proto + +package mediapipe; + +public final class PacketFactory { + private PacketFactory() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface PacketFactoryOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.PacketFactoryOptions) + com.google.protobuf.GeneratedMessageV3. + ExtendableMessageOrBuilder { + } + /** + *
+   * Options used by a PacketFactory to create the Packet.
+   * 
+ * + * Protobuf type {@code mediapipe.PacketFactoryOptions} + */ + public static final class PacketFactoryOptions extends + com.google.protobuf.GeneratedMessageV3.ExtendableMessage< + PacketFactoryOptions> implements + // @@protoc_insertion_point(message_implements:mediapipe.PacketFactoryOptions) + PacketFactoryOptionsOrBuilder { + // Use PacketFactoryOptions.newBuilder() to construct. + private PacketFactoryOptions(com.google.protobuf.GeneratedMessageV3.ExtendableBuilder builder) { + super(builder); + } + private PacketFactoryOptions() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PacketFactoryOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryOptions_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + mediapipe.PacketFactory.PacketFactoryOptions.class, mediapipe.PacketFactory.PacketFactoryOptions.Builder.class); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (!extensionsAreInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + com.google.protobuf.GeneratedMessageV3 + .ExtendableMessage.ExtensionWriter + extensionWriter = newExtensionWriter(); + extensionWriter.writeUntil(536870912, output); + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + size += extensionsSerializedSize(); + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.PacketFactory.PacketFactoryOptions)) { + return super.equals(obj); + } + mediapipe.PacketFactory.PacketFactoryOptions other = (mediapipe.PacketFactory.PacketFactoryOptions) obj; + + boolean result = true; + result = result && unknownFields.equals(other.unknownFields); + result = result && + getExtensionFields().equals(other.getExtensionFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = hashFields(hash, getExtensionFields()); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
mediapipe.PacketFactory.PacketFactoryOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketFactory.PacketFactoryOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketFactoryOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketFactory.PacketFactoryOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketFactoryOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketFactory.PacketFactoryOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketFactoryOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.PacketFactory.PacketFactoryOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, 
input, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketFactoryOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketFactory.PacketFactoryOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.PacketFactory.PacketFactoryOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * Options used by a PacketFactory to create the Packet.
+     * 
+ * + * Protobuf type {@code mediapipe.PacketFactoryOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< + mediapipe.PacketFactory.PacketFactoryOptions, Builder> implements + // @@protoc_insertion_point(builder_implements:mediapipe.PacketFactoryOptions) + mediapipe.PacketFactory.PacketFactoryOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.PacketFactory.PacketFactoryOptions.class, mediapipe.PacketFactory.PacketFactoryOptions.Builder.class); + } + + // Construct using mediapipe.PacketFactory.PacketFactoryOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryOptions_descriptor; + } + + public mediapipe.PacketFactory.PacketFactoryOptions getDefaultInstanceForType() { + return mediapipe.PacketFactory.PacketFactoryOptions.getDefaultInstance(); + } + + public mediapipe.PacketFactory.PacketFactoryOptions build() { + mediapipe.PacketFactory.PacketFactoryOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + 
public mediapipe.PacketFactory.PacketFactoryOptions buildPartial() { + mediapipe.PacketFactory.PacketFactoryOptions result = new mediapipe.PacketFactory.PacketFactoryOptions(this); + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder setExtension( + com.google.protobuf.GeneratedMessage.GeneratedExtension< + mediapipe.PacketFactory.PacketFactoryOptions, Type> extension, + Type value) { + return (Builder) super.setExtension(extension, value); + } + public Builder setExtension( + com.google.protobuf.GeneratedMessage.GeneratedExtension< + mediapipe.PacketFactory.PacketFactoryOptions, java.util.List> extension, + int index, Type value) { + return (Builder) super.setExtension(extension, index, value); + } + public Builder addExtension( + com.google.protobuf.GeneratedMessage.GeneratedExtension< + mediapipe.PacketFactory.PacketFactoryOptions, java.util.List> extension, + Type value) { + return (Builder) super.addExtension(extension, value); + } + public Builder clearExtension( + com.google.protobuf.GeneratedMessage.GeneratedExtension< + mediapipe.PacketFactory.PacketFactoryOptions, ?> extension) { + return (Builder) 
super.clearExtension(extension); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.PacketFactory.PacketFactoryOptions) { + return mergeFrom((mediapipe.PacketFactory.PacketFactoryOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.PacketFactory.PacketFactoryOptions other) { + if (other == mediapipe.PacketFactory.PacketFactoryOptions.getDefaultInstance()) return this; + this.mergeExtensionFields(other); + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (!extensionsAreInitialized()) { + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.PacketFactory.PacketFactoryOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.PacketFactory.PacketFactoryOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.PacketFactoryOptions) + } + + // @@protoc_insertion_point(class_scope:mediapipe.PacketFactoryOptions) + private static final mediapipe.PacketFactory.PacketFactoryOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.PacketFactory.PacketFactoryOptions(); + } + + public static 
mediapipe.PacketFactory.PacketFactoryOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PacketFactoryOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PacketFactoryOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.PacketFactory.PacketFactoryOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface PacketFactoryConfigOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.PacketFactoryConfig) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The name of the registered packet factory class.
+     * 
+ * + * optional string packet_factory = 1; + */ + boolean hasPacketFactory(); + /** + *
+     * The name of the registered packet factory class.
+     * 
+ * + * optional string packet_factory = 1; + */ + java.lang.String getPacketFactory(); + /** + *
+     * The name of the registered packet factory class.
+     * 
+ * + * optional string packet_factory = 1; + */ + com.google.protobuf.ByteString + getPacketFactoryBytes(); + + /** + *
+     * The name of the output side packet that this packet factory creates.
+     * 
+ * + * optional string output_side_packet = 2; + */ + boolean hasOutputSidePacket(); + /** + *
+     * The name of the output side packet that this packet factory creates.
+     * 
+ * + * optional string output_side_packet = 2; + */ + java.lang.String getOutputSidePacket(); + /** + *
+     * The name of the output side packet that this packet factory creates.
+     * 
+ * + * optional string output_side_packet = 2; + */ + com.google.protobuf.ByteString + getOutputSidePacketBytes(); + + /** + *
+     * DEPRECATED: The old name for output_side_packet.
+     * 
+ * + * optional string external_output = 1002; + */ + boolean hasExternalOutput(); + /** + *
+     * DEPRECATED: The old name for output_side_packet.
+     * 
+ * + * optional string external_output = 1002; + */ + java.lang.String getExternalOutput(); + /** + *
+     * DEPRECATED: The old name for output_side_packet.
+     * 
+ * + * optional string external_output = 1002; + */ + com.google.protobuf.ByteString + getExternalOutputBytes(); + + /** + *
+     * The options for the packet factory.
+     * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + boolean hasOptions(); + /** + *
+     * The options for the packet factory.
+     * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + mediapipe.PacketFactory.PacketFactoryOptions getOptions(); + /** + *
+     * The options for the packet factory.
+     * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + mediapipe.PacketFactory.PacketFactoryOptionsOrBuilder getOptionsOrBuilder(); + } + /** + *
+   * A PacketFactory creates a side packet.
+   * 
+ * + * Protobuf type {@code mediapipe.PacketFactoryConfig} + */ + public static final class PacketFactoryConfig extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.PacketFactoryConfig) + PacketFactoryConfigOrBuilder { + // Use PacketFactoryConfig.newBuilder() to construct. + private PacketFactoryConfig(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private PacketFactoryConfig() { + packetFactory_ = ""; + outputSidePacket_ = ""; + externalOutput_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PacketFactoryConfig( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000001; + packetFactory_ = bs; + break; + } + case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000002; + outputSidePacket_ = bs; + break; + } + case 26: { + mediapipe.PacketFactory.PacketFactoryOptions.Builder subBuilder = null; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + subBuilder = options_.toBuilder(); + } + options_ = input.readMessage(mediapipe.PacketFactory.PacketFactoryOptions.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(options_); + options_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000008; + break; + 
} + case 8018: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000004; + externalOutput_ = bs; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.PacketFactory.PacketFactoryConfig.class, mediapipe.PacketFactory.PacketFactoryConfig.Builder.class); + } + + private int bitField0_; + public static final int PACKET_FACTORY_FIELD_NUMBER = 1; + private volatile java.lang.Object packetFactory_; + /** + *
+     * The name of the registered packet factory class.
+     * 
+ * + * optional string packet_factory = 1; + */ + public boolean hasPacketFactory() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+     * The name of the registered packet factory class.
+     * 
+ * + * optional string packet_factory = 1; + */ + public java.lang.String getPacketFactory() { + java.lang.Object ref = packetFactory_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + packetFactory_ = s; + } + return s; + } + } + /** + *
+     * The name of the registered packet factory class.
+     * 
+ * + * optional string packet_factory = 1; + */ + public com.google.protobuf.ByteString + getPacketFactoryBytes() { + java.lang.Object ref = packetFactory_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + packetFactory_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int OUTPUT_SIDE_PACKET_FIELD_NUMBER = 2; + private volatile java.lang.Object outputSidePacket_; + /** + *
+     * The name of the output side packet that this packet factory creates.
+     * 
+ * + * optional string output_side_packet = 2; + */ + public boolean hasOutputSidePacket() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+     * The name of the output side packet that this packet factory creates.
+     * 
+ * + * optional string output_side_packet = 2; + */ + public java.lang.String getOutputSidePacket() { + java.lang.Object ref = outputSidePacket_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + outputSidePacket_ = s; + } + return s; + } + } + /** + *
+     * The name of the output side packet that this packet factory creates.
+     * 
+ * + * optional string output_side_packet = 2; + */ + public com.google.protobuf.ByteString + getOutputSidePacketBytes() { + java.lang.Object ref = outputSidePacket_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + outputSidePacket_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int EXTERNAL_OUTPUT_FIELD_NUMBER = 1002; + private volatile java.lang.Object externalOutput_; + /** + *
+     * DEPRECATED: The old name for output_side_packet.
+     * 
+ * + * optional string external_output = 1002; + */ + public boolean hasExternalOutput() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + *
+     * DEPRECATED: The old name for output_side_packet.
+     * 
+ * + * optional string external_output = 1002; + */ + public java.lang.String getExternalOutput() { + java.lang.Object ref = externalOutput_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + externalOutput_ = s; + } + return s; + } + } + /** + *
+     * DEPRECATED: The old name for output_side_packet.
+     * 
+ * + * optional string external_output = 1002; + */ + public com.google.protobuf.ByteString + getExternalOutputBytes() { + java.lang.Object ref = externalOutput_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + externalOutput_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int OPTIONS_FIELD_NUMBER = 3; + private mediapipe.PacketFactory.PacketFactoryOptions options_; + /** + *
+     * The options for the packet factory.
+     * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public boolean hasOptions() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + *
+     * The options for the packet factory.
+     * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public mediapipe.PacketFactory.PacketFactoryOptions getOptions() { + return options_ == null ? mediapipe.PacketFactory.PacketFactoryOptions.getDefaultInstance() : options_; + } + /** + *
+     * The options for the packet factory.
+     * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public mediapipe.PacketFactory.PacketFactoryOptionsOrBuilder getOptionsOrBuilder() { + return options_ == null ? mediapipe.PacketFactory.PacketFactoryOptions.getDefaultInstance() : options_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (hasOptions()) { + if (!getOptions().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, packetFactory_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, outputSidePacket_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(3, getOptions()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1002, externalOutput_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, packetFactory_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, outputSidePacket_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getOptions()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1002, externalOutput_); + } + size += 
unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.PacketFactory.PacketFactoryConfig)) { + return super.equals(obj); + } + mediapipe.PacketFactory.PacketFactoryConfig other = (mediapipe.PacketFactory.PacketFactoryConfig) obj; + + boolean result = true; + result = result && (hasPacketFactory() == other.hasPacketFactory()); + if (hasPacketFactory()) { + result = result && getPacketFactory() + .equals(other.getPacketFactory()); + } + result = result && (hasOutputSidePacket() == other.hasOutputSidePacket()); + if (hasOutputSidePacket()) { + result = result && getOutputSidePacket() + .equals(other.getOutputSidePacket()); + } + result = result && (hasExternalOutput() == other.hasExternalOutput()); + if (hasExternalOutput()) { + result = result && getExternalOutput() + .equals(other.getExternalOutput()); + } + result = result && (hasOptions() == other.hasOptions()); + if (hasOptions()) { + result = result && getOptions() + .equals(other.getOptions()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPacketFactory()) { + hash = (37 * hash) + PACKET_FACTORY_FIELD_NUMBER; + hash = (53 * hash) + getPacketFactory().hashCode(); + } + if (hasOutputSidePacket()) { + hash = (37 * hash) + OUTPUT_SIDE_PACKET_FIELD_NUMBER; + hash = (53 * hash) + getOutputSidePacket().hashCode(); + } + if (hasExternalOutput()) { + hash = (37 * hash) + EXTERNAL_OUTPUT_FIELD_NUMBER; + hash = (53 * hash) + getExternalOutput().hashCode(); + } + if (hasOptions()) { + hash = (37 * hash) + OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + 
getOptions().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.PacketFactory.PacketFactoryConfig parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketFactory.PacketFactoryConfig parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketFactoryConfig parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketFactory.PacketFactoryConfig parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketFactoryConfig parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketFactory.PacketFactoryConfig parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketFactoryConfig parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.PacketFactory.PacketFactoryConfig parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketFactoryConfig parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketFactory.PacketFactoryConfig parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.PacketFactory.PacketFactoryConfig prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * A PacketFactory creates a side packet.
+     * 
+ * + * Protobuf type {@code mediapipe.PacketFactoryConfig} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.PacketFactoryConfig) + mediapipe.PacketFactory.PacketFactoryConfigOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.PacketFactory.PacketFactoryConfig.class, mediapipe.PacketFactory.PacketFactoryConfig.Builder.class); + } + + // Construct using mediapipe.PacketFactory.PacketFactoryConfig.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getOptionsFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + packetFactory_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + outputSidePacket_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + externalOutput_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + if (optionsBuilder_ == null) { + options_ = null; + } else { + optionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketFactoryConfig_descriptor; + } + + public mediapipe.PacketFactory.PacketFactoryConfig getDefaultInstanceForType() { + return 
mediapipe.PacketFactory.PacketFactoryConfig.getDefaultInstance(); + } + + public mediapipe.PacketFactory.PacketFactoryConfig build() { + mediapipe.PacketFactory.PacketFactoryConfig result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.PacketFactory.PacketFactoryConfig buildPartial() { + mediapipe.PacketFactory.PacketFactoryConfig result = new mediapipe.PacketFactory.PacketFactoryConfig(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.packetFactory_ = packetFactory_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.outputSidePacket_ = outputSidePacket_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.externalOutput_ = externalOutput_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + if (optionsBuilder_ == null) { + result.options_ = options_; + } else { + result.options_ = optionsBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.PacketFactory.PacketFactoryConfig) { + return mergeFrom((mediapipe.PacketFactory.PacketFactoryConfig)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.PacketFactory.PacketFactoryConfig other) { + if (other == mediapipe.PacketFactory.PacketFactoryConfig.getDefaultInstance()) return this; + if (other.hasPacketFactory()) { + bitField0_ |= 0x00000001; + packetFactory_ = other.packetFactory_; + onChanged(); + } + if (other.hasOutputSidePacket()) { + bitField0_ |= 0x00000002; + outputSidePacket_ = other.outputSidePacket_; + onChanged(); + } + if (other.hasExternalOutput()) { + bitField0_ |= 0x00000004; + externalOutput_ = other.externalOutput_; + onChanged(); + } + if (other.hasOptions()) { + mergeOptions(other.getOptions()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (hasOptions()) { + if (!getOptions().isInitialized()) { + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.PacketFactory.PacketFactoryConfig parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.PacketFactory.PacketFactoryConfig) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object packetFactory_ = ""; + /** + *
+       * The name of the registered packet factory class.
+       * 
+ * + * optional string packet_factory = 1; + */ + public boolean hasPacketFactory() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+       * The name of the registered packet factory class.
+       * 
+ * + * optional string packet_factory = 1; + */ + public java.lang.String getPacketFactory() { + java.lang.Object ref = packetFactory_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + packetFactory_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The name of the registered packet factory class.
+       * 
+ * + * optional string packet_factory = 1; + */ + public com.google.protobuf.ByteString + getPacketFactoryBytes() { + java.lang.Object ref = packetFactory_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + packetFactory_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The name of the registered packet factory class.
+       * 
+ * + * optional string packet_factory = 1; + */ + public Builder setPacketFactory( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + packetFactory_ = value; + onChanged(); + return this; + } + /** + *
+       * The name of the registered packet factory class.
+       * 
+ * + * optional string packet_factory = 1; + */ + public Builder clearPacketFactory() { + bitField0_ = (bitField0_ & ~0x00000001); + packetFactory_ = getDefaultInstance().getPacketFactory(); + onChanged(); + return this; + } + /** + *
+       * The name of the registered packet factory class.
+       * 
+ * + * optional string packet_factory = 1; + */ + public Builder setPacketFactoryBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + packetFactory_ = value; + onChanged(); + return this; + } + + private java.lang.Object outputSidePacket_ = ""; + /** + *
+       * The name of the output side packet that this packet factory creates.
+       * 
+ * + * optional string output_side_packet = 2; + */ + public boolean hasOutputSidePacket() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+       * The name of the output side packet that this packet factory creates.
+       * 
+ * + * optional string output_side_packet = 2; + */ + public java.lang.String getOutputSidePacket() { + java.lang.Object ref = outputSidePacket_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + outputSidePacket_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The name of the output side packet that this packet factory creates.
+       * 
+ * + * optional string output_side_packet = 2; + */ + public com.google.protobuf.ByteString + getOutputSidePacketBytes() { + java.lang.Object ref = outputSidePacket_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + outputSidePacket_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The name of the output side packet that this packet factory creates.
+       * 
+ * + * optional string output_side_packet = 2; + */ + public Builder setOutputSidePacket( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + outputSidePacket_ = value; + onChanged(); + return this; + } + /** + *
+       * The name of the output side packet that this packet factory creates.
+       * 
+ * + * optional string output_side_packet = 2; + */ + public Builder clearOutputSidePacket() { + bitField0_ = (bitField0_ & ~0x00000002); + outputSidePacket_ = getDefaultInstance().getOutputSidePacket(); + onChanged(); + return this; + } + /** + *
+       * The name of the output side packet that this packet factory creates.
+       * 
+ * + * optional string output_side_packet = 2; + */ + public Builder setOutputSidePacketBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + outputSidePacket_ = value; + onChanged(); + return this; + } + + private java.lang.Object externalOutput_ = ""; + /** + *
+       * DEPRECATED: The old name for output_side_packet.
+       * 
+ * + * optional string external_output = 1002; + */ + public boolean hasExternalOutput() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + *
+       * DEPRECATED: The old name for output_side_packet.
+       * 
+ * + * optional string external_output = 1002; + */ + public java.lang.String getExternalOutput() { + java.lang.Object ref = externalOutput_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + externalOutput_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * DEPRECATED: The old name for output_side_packet.
+       * 
+ * + * optional string external_output = 1002; + */ + public com.google.protobuf.ByteString + getExternalOutputBytes() { + java.lang.Object ref = externalOutput_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + externalOutput_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * DEPRECATED: The old name for output_side_packet.
+       * 
+ * + * optional string external_output = 1002; + */ + public Builder setExternalOutput( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + externalOutput_ = value; + onChanged(); + return this; + } + /** + *
+       * DEPRECATED: The old name for output_side_packet.
+       * 
+ * + * optional string external_output = 1002; + */ + public Builder clearExternalOutput() { + bitField0_ = (bitField0_ & ~0x00000004); + externalOutput_ = getDefaultInstance().getExternalOutput(); + onChanged(); + return this; + } + /** + *
+       * DEPRECATED: The old name for output_side_packet.
+       * 
+ * + * optional string external_output = 1002; + */ + public Builder setExternalOutputBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + externalOutput_ = value; + onChanged(); + return this; + } + + private mediapipe.PacketFactory.PacketFactoryOptions options_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + mediapipe.PacketFactory.PacketFactoryOptions, mediapipe.PacketFactory.PacketFactoryOptions.Builder, mediapipe.PacketFactory.PacketFactoryOptionsOrBuilder> optionsBuilder_; + /** + *
+       * The options for the packet factory.
+       * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public boolean hasOptions() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + *
+       * The options for the packet factory.
+       * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public mediapipe.PacketFactory.PacketFactoryOptions getOptions() { + if (optionsBuilder_ == null) { + return options_ == null ? mediapipe.PacketFactory.PacketFactoryOptions.getDefaultInstance() : options_; + } else { + return optionsBuilder_.getMessage(); + } + } + /** + *
+       * The options for the packet factory.
+       * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public Builder setOptions(mediapipe.PacketFactory.PacketFactoryOptions value) { + if (optionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + options_ = value; + onChanged(); + } else { + optionsBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + return this; + } + /** + *
+       * The options for the packet factory.
+       * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public Builder setOptions( + mediapipe.PacketFactory.PacketFactoryOptions.Builder builderForValue) { + if (optionsBuilder_ == null) { + options_ = builderForValue.build(); + onChanged(); + } else { + optionsBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + return this; + } + /** + *
+       * The options for the packet factory.
+       * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public Builder mergeOptions(mediapipe.PacketFactory.PacketFactoryOptions value) { + if (optionsBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008) && + options_ != null && + options_ != mediapipe.PacketFactory.PacketFactoryOptions.getDefaultInstance()) { + options_ = + mediapipe.PacketFactory.PacketFactoryOptions.newBuilder(options_).mergeFrom(value).buildPartial(); + } else { + options_ = value; + } + onChanged(); + } else { + optionsBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + return this; + } + /** + *
+       * The options for the packet factory.
+       * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public Builder clearOptions() { + if (optionsBuilder_ == null) { + options_ = null; + onChanged(); + } else { + optionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + /** + *
+       * The options for the packet factory.
+       * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public mediapipe.PacketFactory.PacketFactoryOptions.Builder getOptionsBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getOptionsFieldBuilder().getBuilder(); + } + /** + *
+       * The options for the packet factory.
+       * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + public mediapipe.PacketFactory.PacketFactoryOptionsOrBuilder getOptionsOrBuilder() { + if (optionsBuilder_ != null) { + return optionsBuilder_.getMessageOrBuilder(); + } else { + return options_ == null ? + mediapipe.PacketFactory.PacketFactoryOptions.getDefaultInstance() : options_; + } + } + /** + *
+       * The options for the packet factory.
+       * 
+ * + * optional .mediapipe.PacketFactoryOptions options = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + mediapipe.PacketFactory.PacketFactoryOptions, mediapipe.PacketFactory.PacketFactoryOptions.Builder, mediapipe.PacketFactory.PacketFactoryOptionsOrBuilder> + getOptionsFieldBuilder() { + if (optionsBuilder_ == null) { + optionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + mediapipe.PacketFactory.PacketFactoryOptions, mediapipe.PacketFactory.PacketFactoryOptions.Builder, mediapipe.PacketFactory.PacketFactoryOptionsOrBuilder>( + getOptions(), + getParentForChildren(), + isClean()); + options_ = null; + } + return optionsBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.PacketFactoryConfig) + } + + // @@protoc_insertion_point(class_scope:mediapipe.PacketFactoryConfig) + private static final mediapipe.PacketFactory.PacketFactoryConfig DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.PacketFactory.PacketFactoryConfig(); + } + + public static mediapipe.PacketFactory.PacketFactoryConfig getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PacketFactoryConfig parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PacketFactoryConfig(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + public mediapipe.PacketFactory.PacketFactoryConfig getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface PacketManagerConfigOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.PacketManagerConfig) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + java.util.List + getPacketList(); + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + mediapipe.PacketFactory.PacketFactoryConfig getPacket(int index); + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + int getPacketCount(); + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + java.util.List + getPacketOrBuilderList(); + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + mediapipe.PacketFactory.PacketFactoryConfigOrBuilder getPacketOrBuilder( + int index); + } + /** + *
+   * The configuration for a PacketManager.
+   * 
+ * + * Protobuf type {@code mediapipe.PacketManagerConfig} + */ + public static final class PacketManagerConfig extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.PacketManagerConfig) + PacketManagerConfigOrBuilder { + // Use PacketManagerConfig.newBuilder() to construct. + private PacketManagerConfig(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private PacketManagerConfig() { + packet_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PacketManagerConfig( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + packet_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + packet_.add( + input.readMessage(mediapipe.PacketFactory.PacketFactoryConfig.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + packet_ = java.util.Collections.unmodifiableList(packet_); + } + this.unknownFields = unknownFields.build(); + 
makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketManagerConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketManagerConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.PacketFactory.PacketManagerConfig.class, mediapipe.PacketFactory.PacketManagerConfig.Builder.class); + } + + public static final int PACKET_FIELD_NUMBER = 1; + private java.util.List packet_; + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public java.util.List getPacketList() { + return packet_; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public java.util.List + getPacketOrBuilderList() { + return packet_; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public int getPacketCount() { + return packet_.size(); + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public mediapipe.PacketFactory.PacketFactoryConfig getPacket(int index) { + return packet_.get(index); + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public mediapipe.PacketFactory.PacketFactoryConfigOrBuilder getPacketOrBuilder( + int index) { + return packet_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + for (int i = 0; i < getPacketCount(); i++) { + if (!getPacket(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < packet_.size(); i++) { + 
output.writeMessage(1, packet_.get(i)); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < packet_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, packet_.get(i)); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.PacketFactory.PacketManagerConfig)) { + return super.equals(obj); + } + mediapipe.PacketFactory.PacketManagerConfig other = (mediapipe.PacketFactory.PacketManagerConfig) obj; + + boolean result = true; + result = result && getPacketList() + .equals(other.getPacketList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getPacketCount() > 0) { + hash = (37 * hash) + PACKET_FIELD_NUMBER; + hash = (53 * hash) + getPacketList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.PacketFactory.PacketManagerConfig parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketFactory.PacketManagerConfig parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketManagerConfig parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketFactory.PacketManagerConfig parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketManagerConfig parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketFactory.PacketManagerConfig parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketManagerConfig parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.PacketFactory.PacketManagerConfig parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.PacketFactory.PacketManagerConfig parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketFactory.PacketManagerConfig parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.PacketFactory.PacketManagerConfig prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * The configuration for a PacketManager.
+     * 
+ * + * Protobuf type {@code mediapipe.PacketManagerConfig} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.PacketManagerConfig) + mediapipe.PacketFactory.PacketManagerConfigOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketManagerConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketManagerConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.PacketFactory.PacketManagerConfig.class, mediapipe.PacketFactory.PacketManagerConfig.Builder.class); + } + + // Construct using mediapipe.PacketFactory.PacketManagerConfig.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getPacketFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (packetBuilder_ == null) { + packet_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + packetBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.PacketFactory.internal_static_mediapipe_PacketManagerConfig_descriptor; + } + + public mediapipe.PacketFactory.PacketManagerConfig getDefaultInstanceForType() { + return mediapipe.PacketFactory.PacketManagerConfig.getDefaultInstance(); + } + + public mediapipe.PacketFactory.PacketManagerConfig build() { + mediapipe.PacketFactory.PacketManagerConfig result = buildPartial(); + if 
(!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.PacketFactory.PacketManagerConfig buildPartial() { + mediapipe.PacketFactory.PacketManagerConfig result = new mediapipe.PacketFactory.PacketManagerConfig(this); + int from_bitField0_ = bitField0_; + if (packetBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + packet_ = java.util.Collections.unmodifiableList(packet_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.packet_ = packet_; + } else { + result.packet_ = packetBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.PacketFactory.PacketManagerConfig) { + return mergeFrom((mediapipe.PacketFactory.PacketManagerConfig)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.PacketFactory.PacketManagerConfig other) { + if (other == mediapipe.PacketFactory.PacketManagerConfig.getDefaultInstance()) return this; + if (packetBuilder_ == null) { + if (!other.packet_.isEmpty()) { + if 
(packet_.isEmpty()) { + packet_ = other.packet_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensurePacketIsMutable(); + packet_.addAll(other.packet_); + } + onChanged(); + } + } else { + if (!other.packet_.isEmpty()) { + if (packetBuilder_.isEmpty()) { + packetBuilder_.dispose(); + packetBuilder_ = null; + packet_ = other.packet_; + bitField0_ = (bitField0_ & ~0x00000001); + packetBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getPacketFieldBuilder() : null; + } else { + packetBuilder_.addAllMessages(other.packet_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getPacketCount(); i++) { + if (!getPacket(i).isInitialized()) { + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.PacketFactory.PacketManagerConfig parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.PacketFactory.PacketManagerConfig) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List packet_ = + java.util.Collections.emptyList(); + private void ensurePacketIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + packet_ = new java.util.ArrayList(packet_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + mediapipe.PacketFactory.PacketFactoryConfig, mediapipe.PacketFactory.PacketFactoryConfig.Builder, mediapipe.PacketFactory.PacketFactoryConfigOrBuilder> packetBuilder_; + + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; 
+ */ + public java.util.List getPacketList() { + if (packetBuilder_ == null) { + return java.util.Collections.unmodifiableList(packet_); + } else { + return packetBuilder_.getMessageList(); + } + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public int getPacketCount() { + if (packetBuilder_ == null) { + return packet_.size(); + } else { + return packetBuilder_.getCount(); + } + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public mediapipe.PacketFactory.PacketFactoryConfig getPacket(int index) { + if (packetBuilder_ == null) { + return packet_.get(index); + } else { + return packetBuilder_.getMessage(index); + } + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public Builder setPacket( + int index, mediapipe.PacketFactory.PacketFactoryConfig value) { + if (packetBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePacketIsMutable(); + packet_.set(index, value); + onChanged(); + } else { + packetBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public Builder setPacket( + int index, mediapipe.PacketFactory.PacketFactoryConfig.Builder builderForValue) { + if (packetBuilder_ == null) { + ensurePacketIsMutable(); + packet_.set(index, builderForValue.build()); + onChanged(); + } else { + packetBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public Builder addPacket(mediapipe.PacketFactory.PacketFactoryConfig value) { + if (packetBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePacketIsMutable(); + packet_.add(value); + onChanged(); + } else { + packetBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public Builder addPacket( + int index, 
mediapipe.PacketFactory.PacketFactoryConfig value) { + if (packetBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePacketIsMutable(); + packet_.add(index, value); + onChanged(); + } else { + packetBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public Builder addPacket( + mediapipe.PacketFactory.PacketFactoryConfig.Builder builderForValue) { + if (packetBuilder_ == null) { + ensurePacketIsMutable(); + packet_.add(builderForValue.build()); + onChanged(); + } else { + packetBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public Builder addPacket( + int index, mediapipe.PacketFactory.PacketFactoryConfig.Builder builderForValue) { + if (packetBuilder_ == null) { + ensurePacketIsMutable(); + packet_.add(index, builderForValue.build()); + onChanged(); + } else { + packetBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public Builder addAllPacket( + java.lang.Iterable values) { + if (packetBuilder_ == null) { + ensurePacketIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, packet_); + onChanged(); + } else { + packetBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public Builder clearPacket() { + if (packetBuilder_ == null) { + packet_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + packetBuilder_.clear(); + } + return this; + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public Builder removePacket(int index) { + if (packetBuilder_ == null) { + ensurePacketIsMutable(); + packet_.remove(index); + onChanged(); + } else { + packetBuilder_.remove(index); + } + return this; + } + /** + * 
repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public mediapipe.PacketFactory.PacketFactoryConfig.Builder getPacketBuilder( + int index) { + return getPacketFieldBuilder().getBuilder(index); + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public mediapipe.PacketFactory.PacketFactoryConfigOrBuilder getPacketOrBuilder( + int index) { + if (packetBuilder_ == null) { + return packet_.get(index); } else { + return packetBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public java.util.List + getPacketOrBuilderList() { + if (packetBuilder_ != null) { + return packetBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(packet_); + } + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public mediapipe.PacketFactory.PacketFactoryConfig.Builder addPacketBuilder() { + return getPacketFieldBuilder().addBuilder( + mediapipe.PacketFactory.PacketFactoryConfig.getDefaultInstance()); + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public mediapipe.PacketFactory.PacketFactoryConfig.Builder addPacketBuilder( + int index) { + return getPacketFieldBuilder().addBuilder( + index, mediapipe.PacketFactory.PacketFactoryConfig.getDefaultInstance()); + } + /** + * repeated .mediapipe.PacketFactoryConfig packet = 1; + */ + public java.util.List + getPacketBuilderList() { + return getPacketFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + mediapipe.PacketFactory.PacketFactoryConfig, mediapipe.PacketFactory.PacketFactoryConfig.Builder, mediapipe.PacketFactory.PacketFactoryConfigOrBuilder> + getPacketFieldBuilder() { + if (packetBuilder_ == null) { + packetBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + mediapipe.PacketFactory.PacketFactoryConfig, mediapipe.PacketFactory.PacketFactoryConfig.Builder, mediapipe.PacketFactory.PacketFactoryConfigOrBuilder>( + 
packet_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + packet_ = null; + } + return packetBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.PacketManagerConfig) + } + + // @@protoc_insertion_point(class_scope:mediapipe.PacketManagerConfig) + private static final mediapipe.PacketFactory.PacketManagerConfig DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.PacketFactory.PacketManagerConfig(); + } + + public static mediapipe.PacketFactory.PacketManagerConfig getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PacketManagerConfig parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PacketManagerConfig(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.PacketFactory.PacketManagerConfig getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_PacketFactoryOptions_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_PacketFactoryOptions_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + 
internal_static_mediapipe_PacketFactoryConfig_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_PacketFactoryConfig_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_PacketManagerConfig_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_PacketManagerConfig_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n(mediapipe/framework/packet_factory.pro" + + "to\022\tmediapipe\"\"\n\024PacketFactoryOptions*\n\010" + + "\240\234\001\020\200\200\200\200\002\"\225\001\n\023PacketFactoryConfig\022\026\n\016pac" + + "ket_factory\030\001 \001(\t\022\032\n\022output_side_packet\030" + + "\002 \001(\t\022\030\n\017external_output\030\352\007 \001(\t\0220\n\007optio" + + "ns\030\003 \001(\0132\037.mediapipe.PacketFactoryOption" + + "s\"E\n\023PacketManagerConfig\022.\n\006packet\030\001 \003(\013" + + "2\036.mediapipe.PacketFactoryConfig" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + internal_static_mediapipe_PacketFactoryOptions_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_mediapipe_PacketFactoryOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_PacketFactoryOptions_descriptor, + new java.lang.String[] { }); + internal_static_mediapipe_PacketFactoryConfig_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_mediapipe_PacketFactoryConfig_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_PacketFactoryConfig_descriptor, + new java.lang.String[] { "PacketFactory", "OutputSidePacket", "ExternalOutput", "Options", }); + internal_static_mediapipe_PacketManagerConfig_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_mediapipe_PacketManagerConfig_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_PacketManagerConfig_descriptor, + new java.lang.String[] { "Packet", }); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/PacketGenerator.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/PacketGenerator.java new file mode 100644 index 000000000..4e0c796db --- /dev/null +++ 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/PacketGenerator.java @@ -0,0 +1,2440 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: mediapipe/framework/packet_generator.proto + +package mediapipe; + +public final class PacketGenerator { + private PacketGenerator() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface PacketGeneratorOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.PacketGeneratorOptions) + com.google.protobuf.GeneratedMessageV3. + ExtendableMessageOrBuilder { + } + /** + *
+   * Options used by a PacketGenerator.
+   * 
+ * + * Protobuf type {@code mediapipe.PacketGeneratorOptions} + */ + public static final class PacketGeneratorOptions extends + com.google.protobuf.GeneratedMessageV3.ExtendableMessage< + PacketGeneratorOptions> implements + // @@protoc_insertion_point(message_implements:mediapipe.PacketGeneratorOptions) + PacketGeneratorOptionsOrBuilder { + // Use PacketGeneratorOptions.newBuilder() to construct. + private PacketGeneratorOptions(com.google.protobuf.GeneratedMessageV3.ExtendableBuilder builder) { + super(builder); + } + private PacketGeneratorOptions() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PacketGeneratorOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.PacketGenerator.PacketGeneratorOptions.class, mediapipe.PacketGenerator.PacketGeneratorOptions.Builder.class); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (!extensionsAreInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + com.google.protobuf.GeneratedMessageV3 + .ExtendableMessage.ExtensionWriter + extensionWriter = newExtensionWriter(); + extensionWriter.writeUntil(536870912, output); + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + size += extensionsSerializedSize(); + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.PacketGenerator.PacketGeneratorOptions)) { + return super.equals(obj); + } + mediapipe.PacketGenerator.PacketGeneratorOptions other = (mediapipe.PacketGenerator.PacketGeneratorOptions) obj; + + boolean result = true; + result = result && unknownFields.equals(other.unknownFields); + result = result && + getExtensionFields().equals(other.getExtensionFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = hashFields(hash, getExtensionFields()); + hash = (29 * hash) + 
unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketGenerator.PacketGeneratorOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.PacketGenerator.PacketGeneratorOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * Options used by a PacketGenerator.
+     * 
+ * + * Protobuf type {@code mediapipe.PacketGeneratorOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.ExtendableBuilder< + mediapipe.PacketGenerator.PacketGeneratorOptions, Builder> implements + // @@protoc_insertion_point(builder_implements:mediapipe.PacketGeneratorOptions) + mediapipe.PacketGenerator.PacketGeneratorOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.PacketGenerator.PacketGeneratorOptions.class, mediapipe.PacketGenerator.PacketGeneratorOptions.Builder.class); + } + + // Construct using mediapipe.PacketGenerator.PacketGeneratorOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorOptions_descriptor; + } + + public mediapipe.PacketGenerator.PacketGeneratorOptions getDefaultInstanceForType() { + return mediapipe.PacketGenerator.PacketGeneratorOptions.getDefaultInstance(); + } + + public mediapipe.PacketGenerator.PacketGeneratorOptions build() { + mediapipe.PacketGenerator.PacketGeneratorOptions result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.PacketGenerator.PacketGeneratorOptions buildPartial() { + mediapipe.PacketGenerator.PacketGeneratorOptions result = new mediapipe.PacketGenerator.PacketGeneratorOptions(this); + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder setExtension( + com.google.protobuf.GeneratedMessage.GeneratedExtension< + mediapipe.PacketGenerator.PacketGeneratorOptions, Type> extension, + Type value) { + return (Builder) super.setExtension(extension, value); + } + public Builder setExtension( + com.google.protobuf.GeneratedMessage.GeneratedExtension< + mediapipe.PacketGenerator.PacketGeneratorOptions, java.util.List> extension, + int index, Type value) { + return (Builder) super.setExtension(extension, index, value); + } + public Builder addExtension( + com.google.protobuf.GeneratedMessage.GeneratedExtension< + mediapipe.PacketGenerator.PacketGeneratorOptions, java.util.List> extension, + Type value) { + return (Builder) super.addExtension(extension, value); + } + public Builder clearExtension( + com.google.protobuf.GeneratedMessage.GeneratedExtension< + 
mediapipe.PacketGenerator.PacketGeneratorOptions, ?> extension) { + return (Builder) super.clearExtension(extension); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.PacketGenerator.PacketGeneratorOptions) { + return mergeFrom((mediapipe.PacketGenerator.PacketGeneratorOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.PacketGenerator.PacketGeneratorOptions other) { + if (other == mediapipe.PacketGenerator.PacketGeneratorOptions.getDefaultInstance()) return this; + this.mergeExtensionFields(other); + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (!extensionsAreInitialized()) { + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.PacketGenerator.PacketGeneratorOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.PacketGenerator.PacketGeneratorOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.PacketGeneratorOptions) + } + + // @@protoc_insertion_point(class_scope:mediapipe.PacketGeneratorOptions) + private static final mediapipe.PacketGenerator.PacketGeneratorOptions 
DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.PacketGenerator.PacketGeneratorOptions(); + } + + public static mediapipe.PacketGenerator.PacketGeneratorOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PacketGeneratorOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PacketGeneratorOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.PacketGenerator.PacketGeneratorOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface PacketGeneratorConfigOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.PacketGeneratorConfig) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The name of the registered packet generator class.
+     * 
+ * + * optional string packet_generator = 1; + */ + boolean hasPacketGenerator(); + /** + *
+     * The name of the registered packet generator class.
+     * 
+ * + * optional string packet_generator = 1; + */ + java.lang.String getPacketGenerator(); + /** + *
+     * The name of the registered packet generator class.
+     * 
+ * + * optional string packet_generator = 1; + */ + com.google.protobuf.ByteString + getPacketGeneratorBytes(); + + /** + *
+     * The names of the input side packets.  The PacketGenerator can choose
+     * to access its input side packets either by index or by tag.
+     * 
+ * + * repeated string input_side_packet = 2; + */ + java.util.List + getInputSidePacketList(); + /** + *
+     * The names of the input side packets.  The PacketGenerator can choose
+     * to access its input side packets either by index or by tag.
+     * 
+ * + * repeated string input_side_packet = 2; + */ + int getInputSidePacketCount(); + /** + *
+     * The names of the input side packets.  The PacketGenerator can choose
+     * to access its input side packets either by index or by tag.
+     * 
+ * + * repeated string input_side_packet = 2; + */ + java.lang.String getInputSidePacket(int index); + /** + *
+     * The names of the input side packets.  The PacketGenerator can choose
+     * to access its input side packets either by index or by tag.
+     * 
+ * + * repeated string input_side_packet = 2; + */ + com.google.protobuf.ByteString + getInputSidePacketBytes(int index); + + /** + *
+     * DEPRECATED(mgeorg) The old name for input_side_packet.
+     * 
+ * + * repeated string external_input = 1002; + */ + java.util.List + getExternalInputList(); + /** + *
+     * DEPRECATED(mgeorg) The old name for input_side_packet.
+     * 
+ * + * repeated string external_input = 1002; + */ + int getExternalInputCount(); + /** + *
+     * DEPRECATED(mgeorg) The old name for input_side_packet.
+     * 
+ * + * repeated string external_input = 1002; + */ + java.lang.String getExternalInput(int index); + /** + *
+     * DEPRECATED(mgeorg) The old name for input_side_packet.
+     * 
+ * + * repeated string external_input = 1002; + */ + com.google.protobuf.ByteString + getExternalInputBytes(int index); + + /** + *
+     * The names of the output side packets that this generator produces.
+     * The PacketGenerator can choose to access its output side packets
+     * either by index or by tag.
+     * 
+ * + * repeated string output_side_packet = 3; + */ + java.util.List + getOutputSidePacketList(); + /** + *
+     * The names of the output side packets that this generator produces.
+     * The PacketGenerator can choose to access its output side packets
+     * either by index or by tag.
+     * 
+ * + * repeated string output_side_packet = 3; + */ + int getOutputSidePacketCount(); + /** + *
+     * The names of the output side packets that this generator produces.
+     * The PacketGenerator can choose to access its output side packets
+     * either by index or by tag.
+     * 
+ * + * repeated string output_side_packet = 3; + */ + java.lang.String getOutputSidePacket(int index); + /** + *
+     * The names of the output side packets that this generator produces.
+     * The PacketGenerator can choose to access its output side packets
+     * either by index or by tag.
+     * 
+ * + * repeated string output_side_packet = 3; + */ + com.google.protobuf.ByteString + getOutputSidePacketBytes(int index); + + /** + *
+     * DEPRECATED(mgeorg) The old name for output_side_packet.
+     * 
+ * + * repeated string external_output = 1003; + */ + java.util.List + getExternalOutputList(); + /** + *
+     * DEPRECATED(mgeorg) The old name for output_side_packet.
+     * 
+ * + * repeated string external_output = 1003; + */ + int getExternalOutputCount(); + /** + *
+     * DEPRECATED(mgeorg) The old name for output_side_packet.
+     * 
+ * + * repeated string external_output = 1003; + */ + java.lang.String getExternalOutput(int index); + /** + *
+     * DEPRECATED(mgeorg) The old name for output_side_packet.
+     * 
+ * + * repeated string external_output = 1003; + */ + com.google.protobuf.ByteString + getExternalOutputBytes(int index); + + /** + *
+     * The options for the packet generator.
+     * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + boolean hasOptions(); + /** + *
+     * The options for the packet generator.
+     * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + mediapipe.PacketGenerator.PacketGeneratorOptions getOptions(); + /** + *
+     * The options for the packet generator.
+     * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + mediapipe.PacketGenerator.PacketGeneratorOptionsOrBuilder getOptionsOrBuilder(); + } + /** + *
+   * The settings specifying a packet generator and how it is connected.
+   * 
+ * + * Protobuf type {@code mediapipe.PacketGeneratorConfig} + */ + public static final class PacketGeneratorConfig extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.PacketGeneratorConfig) + PacketGeneratorConfigOrBuilder { + // Use PacketGeneratorConfig.newBuilder() to construct. + private PacketGeneratorConfig(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private PacketGeneratorConfig() { + packetGenerator_ = ""; + inputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + externalInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + outputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + externalOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PacketGeneratorConfig( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000001; + packetGenerator_ = bs; + break; + } + case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + inputSidePacket_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000002; + } + inputSidePacket_.add(bs); + break; + } + case 26: { + com.google.protobuf.ByteString 
bs = input.readBytes(); + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + outputSidePacket_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000008; + } + outputSidePacket_.add(bs); + break; + } + case 34: { + mediapipe.PacketGenerator.PacketGeneratorOptions.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = options_.toBuilder(); + } + options_ = input.readMessage(mediapipe.PacketGenerator.PacketGeneratorOptions.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(options_); + options_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 8018: { + com.google.protobuf.ByteString bs = input.readBytes(); + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + externalInput_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000004; + } + externalInput_.add(bs); + break; + } + case 8026: { + com.google.protobuf.ByteString bs = input.readBytes(); + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + externalOutput_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000010; + } + externalOutput_.add(bs); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + inputSidePacket_ = inputSidePacket_.getUnmodifiableView(); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + outputSidePacket_ = outputSidePacket_.getUnmodifiableView(); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + externalInput_ = externalInput_.getUnmodifiableView(); + } + if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + externalOutput_ = externalOutput_.getUnmodifiableView(); + } + 
this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.PacketGenerator.PacketGeneratorConfig.class, mediapipe.PacketGenerator.PacketGeneratorConfig.Builder.class); + } + + private int bitField0_; + public static final int PACKET_GENERATOR_FIELD_NUMBER = 1; + private volatile java.lang.Object packetGenerator_; + /** + *
+     * The name of the registered packet generator class.
+     * 
+ * + * optional string packet_generator = 1; + */ + public boolean hasPacketGenerator() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+     * The name of the registered packet generator class.
+     * 
+ * + * optional string packet_generator = 1; + */ + public java.lang.String getPacketGenerator() { + java.lang.Object ref = packetGenerator_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + packetGenerator_ = s; + } + return s; + } + } + /** + *
+     * The name of the registered packet generator class.
+     * 
+ * + * optional string packet_generator = 1; + */ + public com.google.protobuf.ByteString + getPacketGeneratorBytes() { + java.lang.Object ref = packetGenerator_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + packetGenerator_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int INPUT_SIDE_PACKET_FIELD_NUMBER = 2; + private com.google.protobuf.LazyStringList inputSidePacket_; + /** + *
+     * The names of the input side packets.  The PacketGenerator can choose
+     * to access its input side packets either by index or by tag.
+     * 
+ * + * repeated string input_side_packet = 2; + */ + public com.google.protobuf.ProtocolStringList + getInputSidePacketList() { + return inputSidePacket_; + } + /** + *
+     * The names of the input side packets.  The PacketGenerator can choose
+     * to access its input side packets either by index or by tag.
+     * 
+ * + * repeated string input_side_packet = 2; + */ + public int getInputSidePacketCount() { + return inputSidePacket_.size(); + } + /** + *
+     * The names of the input side packets.  The PacketGenerator can choose
+     * to access its input side packets either by index or by tag.
+     * 
+ * + * repeated string input_side_packet = 2; + */ + public java.lang.String getInputSidePacket(int index) { + return inputSidePacket_.get(index); + } + /** + *
+     * The names of the input side packets.  The PacketGenerator can choose
+     * to access its input side packets either by index or by tag.
+     * 
+ * + * repeated string input_side_packet = 2; + */ + public com.google.protobuf.ByteString + getInputSidePacketBytes(int index) { + return inputSidePacket_.getByteString(index); + } + + public static final int EXTERNAL_INPUT_FIELD_NUMBER = 1002; + private com.google.protobuf.LazyStringList externalInput_; + /** + *
+     * DEPRECATED(mgeorg) The old name for input_side_packet.
+     * 
+ * + * repeated string external_input = 1002; + */ + public com.google.protobuf.ProtocolStringList + getExternalInputList() { + return externalInput_; + } + /** + *
+     * DEPRECATED(mgeorg) The old name for input_side_packet.
+     * 
+ * + * repeated string external_input = 1002; + */ + public int getExternalInputCount() { + return externalInput_.size(); + } + /** + *
+     * DEPRECATED(mgeorg) The old name for input_side_packet.
+     * 
+ * + * repeated string external_input = 1002; + */ + public java.lang.String getExternalInput(int index) { + return externalInput_.get(index); + } + /** + *
+     * DEPRECATED(mgeorg) The old name for input_side_packet.
+     * 
+ * + * repeated string external_input = 1002; + */ + public com.google.protobuf.ByteString + getExternalInputBytes(int index) { + return externalInput_.getByteString(index); + } + + public static final int OUTPUT_SIDE_PACKET_FIELD_NUMBER = 3; + private com.google.protobuf.LazyStringList outputSidePacket_; + /** + *
+     * The names of the output side packets that this generator produces.
+     * The PacketGenerator can choose to access its output side packets
+     * either by index or by tag.
+     * 
+ * + * repeated string output_side_packet = 3; + */ + public com.google.protobuf.ProtocolStringList + getOutputSidePacketList() { + return outputSidePacket_; + } + /** + *
+     * The names of the output side packets that this generator produces.
+     * The PacketGenerator can choose to access its output side packets
+     * either by index or by tag.
+     * 
+ * + * repeated string output_side_packet = 3; + */ + public int getOutputSidePacketCount() { + return outputSidePacket_.size(); + } + /** + *
+     * The names of the output side packets that this generator produces.
+     * The PacketGenerator can choose to access its output side packets
+     * either by index or by tag.
+     * 
+ * + * repeated string output_side_packet = 3; + */ + public java.lang.String getOutputSidePacket(int index) { + return outputSidePacket_.get(index); + } + /** + *
+     * The names of the output side packets that this generator produces.
+     * The PacketGenerator can choose to access its output side packets
+     * either by index or by tag.
+     * 
+ * + * repeated string output_side_packet = 3; + */ + public com.google.protobuf.ByteString + getOutputSidePacketBytes(int index) { + return outputSidePacket_.getByteString(index); + } + + public static final int EXTERNAL_OUTPUT_FIELD_NUMBER = 1003; + private com.google.protobuf.LazyStringList externalOutput_; + /** + *
+     * DEPRECATED(mgeorg) The old name for output_side_packet.
+     * 
+ * + * repeated string external_output = 1003; + */ + public com.google.protobuf.ProtocolStringList + getExternalOutputList() { + return externalOutput_; + } + /** + *
+     * DEPRECATED(mgeorg) The old name for output_side_packet.
+     * 
+ * + * repeated string external_output = 1003; + */ + public int getExternalOutputCount() { + return externalOutput_.size(); + } + /** + *
+     * DEPRECATED(mgeorg) The old name for output_side_packet.
+     * 
+ * + * repeated string external_output = 1003; + */ + public java.lang.String getExternalOutput(int index) { + return externalOutput_.get(index); + } + /** + *
+     * DEPRECATED(mgeorg) The old name for output_side_packet.
+     * 
+ * + * repeated string external_output = 1003; + */ + public com.google.protobuf.ByteString + getExternalOutputBytes(int index) { + return externalOutput_.getByteString(index); + } + + public static final int OPTIONS_FIELD_NUMBER = 4; + private mediapipe.PacketGenerator.PacketGeneratorOptions options_; + /** + *
+     * The options for the packet generator.
+     * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public boolean hasOptions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+     * The options for the packet generator.
+     * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public mediapipe.PacketGenerator.PacketGeneratorOptions getOptions() { + return options_ == null ? mediapipe.PacketGenerator.PacketGeneratorOptions.getDefaultInstance() : options_; + } + /** + *
+     * The options for the packet generator.
+     * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public mediapipe.PacketGenerator.PacketGeneratorOptionsOrBuilder getOptionsOrBuilder() { + return options_ == null ? mediapipe.PacketGenerator.PacketGeneratorOptions.getDefaultInstance() : options_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (hasOptions()) { + if (!getOptions().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, packetGenerator_); + } + for (int i = 0; i < inputSidePacket_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputSidePacket_.getRaw(i)); + } + for (int i = 0; i < outputSidePacket_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, outputSidePacket_.getRaw(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(4, getOptions()); + } + for (int i = 0; i < externalInput_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1002, externalInput_.getRaw(i)); + } + for (int i = 0; i < externalOutput_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1003, externalOutput_.getRaw(i)); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, packetGenerator_); + } + { + int dataSize = 0; + for (int i = 0; i < inputSidePacket_.size(); i++) { + dataSize += 
computeStringSizeNoTag(inputSidePacket_.getRaw(i)); + } + size += dataSize; + size += 1 * getInputSidePacketList().size(); + } + { + int dataSize = 0; + for (int i = 0; i < outputSidePacket_.size(); i++) { + dataSize += computeStringSizeNoTag(outputSidePacket_.getRaw(i)); + } + size += dataSize; + size += 1 * getOutputSidePacketList().size(); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getOptions()); + } + { + int dataSize = 0; + for (int i = 0; i < externalInput_.size(); i++) { + dataSize += computeStringSizeNoTag(externalInput_.getRaw(i)); + } + size += dataSize; + size += 2 * getExternalInputList().size(); + } + { + int dataSize = 0; + for (int i = 0; i < externalOutput_.size(); i++) { + dataSize += computeStringSizeNoTag(externalOutput_.getRaw(i)); + } + size += dataSize; + size += 2 * getExternalOutputList().size(); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.PacketGenerator.PacketGeneratorConfig)) { + return super.equals(obj); + } + mediapipe.PacketGenerator.PacketGeneratorConfig other = (mediapipe.PacketGenerator.PacketGeneratorConfig) obj; + + boolean result = true; + result = result && (hasPacketGenerator() == other.hasPacketGenerator()); + if (hasPacketGenerator()) { + result = result && getPacketGenerator() + .equals(other.getPacketGenerator()); + } + result = result && getInputSidePacketList() + .equals(other.getInputSidePacketList()); + result = result && getExternalInputList() + .equals(other.getExternalInputList()); + result = result && getOutputSidePacketList() + .equals(other.getOutputSidePacketList()); + result = result && getExternalOutputList() + .equals(other.getExternalOutputList()); + result = result && 
(hasOptions() == other.hasOptions()); + if (hasOptions()) { + result = result && getOptions() + .equals(other.getOptions()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPacketGenerator()) { + hash = (37 * hash) + PACKET_GENERATOR_FIELD_NUMBER; + hash = (53 * hash) + getPacketGenerator().hashCode(); + } + if (getInputSidePacketCount() > 0) { + hash = (37 * hash) + INPUT_SIDE_PACKET_FIELD_NUMBER; + hash = (53 * hash) + getInputSidePacketList().hashCode(); + } + if (getExternalInputCount() > 0) { + hash = (37 * hash) + EXTERNAL_INPUT_FIELD_NUMBER; + hash = (53 * hash) + getExternalInputList().hashCode(); + } + if (getOutputSidePacketCount() > 0) { + hash = (37 * hash) + OUTPUT_SIDE_PACKET_FIELD_NUMBER; + hash = (53 * hash) + getOutputSidePacketList().hashCode(); + } + if (getExternalOutputCount() > 0) { + hash = (37 * hash) + EXTERNAL_OUTPUT_FIELD_NUMBER; + hash = (53 * hash) + getExternalOutputList().hashCode(); + } + if (hasOptions()) { + hash = (37 * hash) + OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + getOptions().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.PacketGenerator.PacketGeneratorConfig parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.PacketGenerator.PacketGeneratorConfig prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * The settings specifying a packet generator and how it is connected.
+     * 
+ * + * Protobuf type {@code mediapipe.PacketGeneratorConfig} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.PacketGeneratorConfig) + mediapipe.PacketGenerator.PacketGeneratorConfigOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.PacketGenerator.PacketGeneratorConfig.class, mediapipe.PacketGenerator.PacketGeneratorConfig.Builder.class); + } + + // Construct using mediapipe.PacketGenerator.PacketGeneratorConfig.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getOptionsFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + packetGenerator_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + inputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + externalInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + outputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + externalOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000010); + if (optionsBuilder_ == null) { + options_ = null; + } else { + optionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & 
~0x00000020); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.PacketGenerator.internal_static_mediapipe_PacketGeneratorConfig_descriptor; + } + + public mediapipe.PacketGenerator.PacketGeneratorConfig getDefaultInstanceForType() { + return mediapipe.PacketGenerator.PacketGeneratorConfig.getDefaultInstance(); + } + + public mediapipe.PacketGenerator.PacketGeneratorConfig build() { + mediapipe.PacketGenerator.PacketGeneratorConfig result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.PacketGenerator.PacketGeneratorConfig buildPartial() { + mediapipe.PacketGenerator.PacketGeneratorConfig result = new mediapipe.PacketGenerator.PacketGeneratorConfig(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.packetGenerator_ = packetGenerator_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + inputSidePacket_ = inputSidePacket_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.inputSidePacket_ = inputSidePacket_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + externalInput_ = externalInput_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.externalInput_ = externalInput_; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + outputSidePacket_ = outputSidePacket_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.outputSidePacket_ = outputSidePacket_; + if (((bitField0_ & 0x00000010) == 0x00000010)) { + externalOutput_ = externalOutput_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.externalOutput_ = externalOutput_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000002; + } + if (optionsBuilder_ == null) { + result.options_ = options_; + } 
else { + result.options_ = optionsBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.PacketGenerator.PacketGeneratorConfig) { + return mergeFrom((mediapipe.PacketGenerator.PacketGeneratorConfig)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.PacketGenerator.PacketGeneratorConfig other) { + if (other == mediapipe.PacketGenerator.PacketGeneratorConfig.getDefaultInstance()) return this; + if (other.hasPacketGenerator()) { + bitField0_ |= 0x00000001; + packetGenerator_ = other.packetGenerator_; + onChanged(); + } + if (!other.inputSidePacket_.isEmpty()) { + if (inputSidePacket_.isEmpty()) { + inputSidePacket_ = other.inputSidePacket_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureInputSidePacketIsMutable(); + inputSidePacket_.addAll(other.inputSidePacket_); + } + onChanged(); + } + if (!other.externalInput_.isEmpty()) { + if (externalInput_.isEmpty()) { + externalInput_ = other.externalInput_; + bitField0_ = (bitField0_ & 
~0x00000004); + } else { + ensureExternalInputIsMutable(); + externalInput_.addAll(other.externalInput_); + } + onChanged(); + } + if (!other.outputSidePacket_.isEmpty()) { + if (outputSidePacket_.isEmpty()) { + outputSidePacket_ = other.outputSidePacket_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureOutputSidePacketIsMutable(); + outputSidePacket_.addAll(other.outputSidePacket_); + } + onChanged(); + } + if (!other.externalOutput_.isEmpty()) { + if (externalOutput_.isEmpty()) { + externalOutput_ = other.externalOutput_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureExternalOutputIsMutable(); + externalOutput_.addAll(other.externalOutput_); + } + onChanged(); + } + if (other.hasOptions()) { + mergeOptions(other.getOptions()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (hasOptions()) { + if (!getOptions().isInitialized()) { + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.PacketGenerator.PacketGeneratorConfig parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.PacketGenerator.PacketGeneratorConfig) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object packetGenerator_ = ""; + /** + *
+       * The name of the registered packet generator class.
+       * 
+ * + * optional string packet_generator = 1; + */ + public boolean hasPacketGenerator() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+       * The name of the registered packet generator class.
+       * 
+ * + * optional string packet_generator = 1; + */ + public java.lang.String getPacketGenerator() { + java.lang.Object ref = packetGenerator_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + packetGenerator_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The name of the registered packet generator class.
+       * 
+ * + * optional string packet_generator = 1; + */ + public com.google.protobuf.ByteString + getPacketGeneratorBytes() { + java.lang.Object ref = packetGenerator_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + packetGenerator_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The name of the registered packet generator class.
+       * 
+ * + * optional string packet_generator = 1; + */ + public Builder setPacketGenerator( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + packetGenerator_ = value; + onChanged(); + return this; + } + /** + *
+       * The name of the registered packet generator class.
+       * 
+ * + * optional string packet_generator = 1; + */ + public Builder clearPacketGenerator() { + bitField0_ = (bitField0_ & ~0x00000001); + packetGenerator_ = getDefaultInstance().getPacketGenerator(); + onChanged(); + return this; + } + /** + *
+       * The name of the registered packet generator class.
+       * 
+ * + * optional string packet_generator = 1; + */ + public Builder setPacketGeneratorBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + packetGenerator_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList inputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureInputSidePacketIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + inputSidePacket_ = new com.google.protobuf.LazyStringArrayList(inputSidePacket_); + bitField0_ |= 0x00000002; + } + } + /** + *
+       * The names of the input side packets.  The PacketGenerator can choose
+       * to access its input side packets either by index or by tag.
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public com.google.protobuf.ProtocolStringList + getInputSidePacketList() { + return inputSidePacket_.getUnmodifiableView(); + } + /** + *
+       * The names of the input side packets.  The PacketGenerator can choose
+       * to access its input side packets either by index or by tag.
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public int getInputSidePacketCount() { + return inputSidePacket_.size(); + } + /** + *
+       * The names of the input side packets.  The PacketGenerator can choose
+       * to access its input side packets either by index or by tag.
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public java.lang.String getInputSidePacket(int index) { + return inputSidePacket_.get(index); + } + /** + *
+       * The names of the input side packets.  The PacketGenerator can choose
+       * to access its input side packets either by index or by tag.
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public com.google.protobuf.ByteString + getInputSidePacketBytes(int index) { + return inputSidePacket_.getByteString(index); + } + /** + *
+       * The names of the input side packets.  The PacketGenerator can choose
+       * to access its input side packets either by index or by tag.
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder setInputSidePacket( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputSidePacketIsMutable(); + inputSidePacket_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * The names of the input side packets.  The PacketGenerator can choose
+       * to access its input side packets either by index or by tag.
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder addInputSidePacket( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputSidePacketIsMutable(); + inputSidePacket_.add(value); + onChanged(); + return this; + } + /** + *
+       * The names of the input side packets.  The PacketGenerator can choose
+       * to access its input side packets either by index or by tag.
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder addAllInputSidePacket( + java.lang.Iterable values) { + ensureInputSidePacketIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, inputSidePacket_); + onChanged(); + return this; + } + /** + *
+       * The names of the input side packets.  The PacketGenerator can choose
+       * to access its input side packets either by index or by tag.
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder clearInputSidePacket() { + inputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       * The names of the input side packets.  The PacketGenerator can choose
+       * to access its input side packets either by index or by tag.
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder addInputSidePacketBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputSidePacketIsMutable(); + inputSidePacket_.add(value); + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList externalInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureExternalInputIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + externalInput_ = new com.google.protobuf.LazyStringArrayList(externalInput_); + bitField0_ |= 0x00000004; + } + } + /** + *
+       * DEPRECATED(mgeorg) The old name for input_side_packet.
+       * 
+ * + * repeated string external_input = 1002; + */ + public com.google.protobuf.ProtocolStringList + getExternalInputList() { + return externalInput_.getUnmodifiableView(); + } + /** + *
+       * DEPRECATED(mgeorg) The old name for input_side_packet.
+       * 
+ * + * repeated string external_input = 1002; + */ + public int getExternalInputCount() { + return externalInput_.size(); + } + /** + *
+       * DEPRECATED(mgeorg) The old name for input_side_packet.
+       * 
+ * + * repeated string external_input = 1002; + */ + public java.lang.String getExternalInput(int index) { + return externalInput_.get(index); + } + /** + *
+       * DEPRECATED(mgeorg) The old name for input_side_packet.
+       * 
+ * + * repeated string external_input = 1002; + */ + public com.google.protobuf.ByteString + getExternalInputBytes(int index) { + return externalInput_.getByteString(index); + } + /** + *
+       * DEPRECATED(mgeorg) The old name for input_side_packet.
+       * 
+ * + * repeated string external_input = 1002; + */ + public Builder setExternalInput( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExternalInputIsMutable(); + externalInput_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * DEPRECATED(mgeorg) The old name for input_side_packet.
+       * 
+ * + * repeated string external_input = 1002; + */ + public Builder addExternalInput( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExternalInputIsMutable(); + externalInput_.add(value); + onChanged(); + return this; + } + /** + *
+       * DEPRECATED(mgeorg) The old name for input_side_packet.
+       * 
+ * + * repeated string external_input = 1002; + */ + public Builder addAllExternalInput( + java.lang.Iterable values) { + ensureExternalInputIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, externalInput_); + onChanged(); + return this; + } + /** + *
+       * DEPRECATED(mgeorg) The old name for input_side_packet.
+       * 
+ * + * repeated string external_input = 1002; + */ + public Builder clearExternalInput() { + externalInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * DEPRECATED(mgeorg) The old name for input_side_packet.
+       * 
+ * + * repeated string external_input = 1002; + */ + public Builder addExternalInputBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExternalInputIsMutable(); + externalInput_.add(value); + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList outputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureOutputSidePacketIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + outputSidePacket_ = new com.google.protobuf.LazyStringArrayList(outputSidePacket_); + bitField0_ |= 0x00000008; + } + } + /** + *
+       * The names of the output side packets that this generator produces.
+       * The PacketGenerator can choose to access its output side packets
+       * either by index or by tag.
+       * 
+ * + * repeated string output_side_packet = 3; + */ + public com.google.protobuf.ProtocolStringList + getOutputSidePacketList() { + return outputSidePacket_.getUnmodifiableView(); + } + /** + *
+       * The names of the output side packets that this generator produces.
+       * The PacketGenerator can choose to access its output side packets
+       * either by index or by tag.
+       * 
+ * + * repeated string output_side_packet = 3; + */ + public int getOutputSidePacketCount() { + return outputSidePacket_.size(); + } + /** + *
+       * The names of the output side packets that this generator produces.
+       * The PacketGenerator can choose to access its output side packets
+       * either by index or by tag.
+       * 
+ * + * repeated string output_side_packet = 3; + */ + public java.lang.String getOutputSidePacket(int index) { + return outputSidePacket_.get(index); + } + /** + *
+       * The names of the output side packets that this generator produces.
+       * The PacketGenerator can choose to access its output side packets
+       * either by index or by tag.
+       * 
+ * + * repeated string output_side_packet = 3; + */ + public com.google.protobuf.ByteString + getOutputSidePacketBytes(int index) { + return outputSidePacket_.getByteString(index); + } + /** + *
+       * The names of the output side packets that this generator produces.
+       * The PacketGenerator can choose to access its output side packets
+       * either by index or by tag.
+       * 
+ * + * repeated string output_side_packet = 3; + */ + public Builder setOutputSidePacket( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputSidePacketIsMutable(); + outputSidePacket_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * The names of the output side packets that this generator produces.
+       * The PacketGenerator can choose to access its output side packets
+       * either by index or by tag.
+       * 
+ * + * repeated string output_side_packet = 3; + */ + public Builder addOutputSidePacket( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputSidePacketIsMutable(); + outputSidePacket_.add(value); + onChanged(); + return this; + } + /** + *
+       * The names of the output side packets that this generator produces.
+       * The PacketGenerator can choose to access its output side packets
+       * either by index or by tag.
+       * 
+ * + * repeated string output_side_packet = 3; + */ + public Builder addAllOutputSidePacket( + java.lang.Iterable values) { + ensureOutputSidePacketIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, outputSidePacket_); + onChanged(); + return this; + } + /** + *
+       * The names of the output side packets that this generator produces.
+       * The PacketGenerator can choose to access its output side packets
+       * either by index or by tag.
+       * 
+ * + * repeated string output_side_packet = 3; + */ + public Builder clearOutputSidePacket() { + outputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + *
+       * The names of the output side packets that this generator produces.
+       * The PacketGenerator can choose to access its output side packets
+       * either by index or by tag.
+       * 
+ * + * repeated string output_side_packet = 3; + */ + public Builder addOutputSidePacketBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputSidePacketIsMutable(); + outputSidePacket_.add(value); + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList externalOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureExternalOutputIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + externalOutput_ = new com.google.protobuf.LazyStringArrayList(externalOutput_); + bitField0_ |= 0x00000010; + } + } + /** + *
+       * DEPRECATED(mgeorg) The old name for output_side_packet.
+       * 
+ * + * repeated string external_output = 1003; + */ + public com.google.protobuf.ProtocolStringList + getExternalOutputList() { + return externalOutput_.getUnmodifiableView(); + } + /** + *
+       * DEPRECATED(mgeorg) The old name for output_side_packet.
+       * 
+ * + * repeated string external_output = 1003; + */ + public int getExternalOutputCount() { + return externalOutput_.size(); + } + /** + *
+       * DEPRECATED(mgeorg) The old name for output_side_packet.
+       * 
+ * + * repeated string external_output = 1003; + */ + public java.lang.String getExternalOutput(int index) { + return externalOutput_.get(index); + } + /** + *
+       * DEPRECATED(mgeorg) The old name for output_side_packet.
+       * 
+ * + * repeated string external_output = 1003; + */ + public com.google.protobuf.ByteString + getExternalOutputBytes(int index) { + return externalOutput_.getByteString(index); + } + /** + *
+       * DEPRECATED(mgeorg) The old name for output_side_packet.
+       * 
+ * + * repeated string external_output = 1003; + */ + public Builder setExternalOutput( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExternalOutputIsMutable(); + externalOutput_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * DEPRECATED(mgeorg) The old name for output_side_packet.
+       * 
+ * + * repeated string external_output = 1003; + */ + public Builder addExternalOutput( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExternalOutputIsMutable(); + externalOutput_.add(value); + onChanged(); + return this; + } + /** + *
+       * DEPRECATED(mgeorg) The old name for output_side_packet.
+       * 
+ * + * repeated string external_output = 1003; + */ + public Builder addAllExternalOutput( + java.lang.Iterable values) { + ensureExternalOutputIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, externalOutput_); + onChanged(); + return this; + } + /** + *
+       * DEPRECATED(mgeorg) The old name for output_side_packet.
+       * 
+ * + * repeated string external_output = 1003; + */ + public Builder clearExternalOutput() { + externalOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + /** + *
+       * DEPRECATED(mgeorg) The old name for output_side_packet.
+       * 
+ * + * repeated string external_output = 1003; + */ + public Builder addExternalOutputBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExternalOutputIsMutable(); + externalOutput_.add(value); + onChanged(); + return this; + } + + private mediapipe.PacketGenerator.PacketGeneratorOptions options_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + mediapipe.PacketGenerator.PacketGeneratorOptions, mediapipe.PacketGenerator.PacketGeneratorOptions.Builder, mediapipe.PacketGenerator.PacketGeneratorOptionsOrBuilder> optionsBuilder_; + /** + *
+       * The options for the packet generator.
+       * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public boolean hasOptions() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + *
+       * The options for the packet generator.
+       * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public mediapipe.PacketGenerator.PacketGeneratorOptions getOptions() { + if (optionsBuilder_ == null) { + return options_ == null ? mediapipe.PacketGenerator.PacketGeneratorOptions.getDefaultInstance() : options_; + } else { + return optionsBuilder_.getMessage(); + } + } + /** + *
+       * The options for the packet generator.
+       * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public Builder setOptions(mediapipe.PacketGenerator.PacketGeneratorOptions value) { + if (optionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + options_ = value; + onChanged(); + } else { + optionsBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + return this; + } + /** + *
+       * The options for the packet generator.
+       * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public Builder setOptions( + mediapipe.PacketGenerator.PacketGeneratorOptions.Builder builderForValue) { + if (optionsBuilder_ == null) { + options_ = builderForValue.build(); + onChanged(); + } else { + optionsBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + return this; + } + /** + *
+       * The options for the packet generator.
+       * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public Builder mergeOptions(mediapipe.PacketGenerator.PacketGeneratorOptions value) { + if (optionsBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020) && + options_ != null && + options_ != mediapipe.PacketGenerator.PacketGeneratorOptions.getDefaultInstance()) { + options_ = + mediapipe.PacketGenerator.PacketGeneratorOptions.newBuilder(options_).mergeFrom(value).buildPartial(); + } else { + options_ = value; + } + onChanged(); + } else { + optionsBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + return this; + } + /** + *
+       * The options for the packet generator.
+       * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public Builder clearOptions() { + if (optionsBuilder_ == null) { + options_ = null; + onChanged(); + } else { + optionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + /** + *
+       * The options for the packet generator.
+       * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public mediapipe.PacketGenerator.PacketGeneratorOptions.Builder getOptionsBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getOptionsFieldBuilder().getBuilder(); + } + /** + *
+       * The options for the packet generator.
+       * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + public mediapipe.PacketGenerator.PacketGeneratorOptionsOrBuilder getOptionsOrBuilder() { + if (optionsBuilder_ != null) { + return optionsBuilder_.getMessageOrBuilder(); + } else { + return options_ == null ? + mediapipe.PacketGenerator.PacketGeneratorOptions.getDefaultInstance() : options_; + } + } + /** + *
+       * The options for the packet generator.
+       * 
+ * + * optional .mediapipe.PacketGeneratorOptions options = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + mediapipe.PacketGenerator.PacketGeneratorOptions, mediapipe.PacketGenerator.PacketGeneratorOptions.Builder, mediapipe.PacketGenerator.PacketGeneratorOptionsOrBuilder> + getOptionsFieldBuilder() { + if (optionsBuilder_ == null) { + optionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + mediapipe.PacketGenerator.PacketGeneratorOptions, mediapipe.PacketGenerator.PacketGeneratorOptions.Builder, mediapipe.PacketGenerator.PacketGeneratorOptionsOrBuilder>( + getOptions(), + getParentForChildren(), + isClean()); + options_ = null; + } + return optionsBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.PacketGeneratorConfig) + } + + // @@protoc_insertion_point(class_scope:mediapipe.PacketGeneratorConfig) + private static final mediapipe.PacketGenerator.PacketGeneratorConfig DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.PacketGenerator.PacketGeneratorConfig(); + } + + public static mediapipe.PacketGenerator.PacketGeneratorConfig getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public PacketGeneratorConfig parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PacketGeneratorConfig(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + 
public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.PacketGenerator.PacketGeneratorConfig getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_PacketGeneratorOptions_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_PacketGeneratorOptions_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_PacketGeneratorConfig_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_PacketGeneratorConfig_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n*mediapipe/framework/packet_generator.p" + + "roto\022\tmediapipe\"$\n\026PacketGeneratorOption" + + "s*\n\010\240\234\001\020\200\200\200\200\002\"\317\001\n\025PacketGeneratorConfig\022" + + "\030\n\020packet_generator\030\001 \001(\t\022\031\n\021input_side_" + + "packet\030\002 \003(\t\022\027\n\016external_input\030\352\007 \003(\t\022\032\n" + + "\022output_side_packet\030\003 \003(\t\022\030\n\017external_ou" + + "tput\030\353\007 \003(\t\0222\n\007options\030\004 \001(\0132!.mediapipe" + + ".PacketGeneratorOptions" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + internal_static_mediapipe_PacketGeneratorOptions_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_mediapipe_PacketGeneratorOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_PacketGeneratorOptions_descriptor, + new java.lang.String[] { }); + internal_static_mediapipe_PacketGeneratorConfig_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_mediapipe_PacketGeneratorConfig_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_PacketGeneratorConfig_descriptor, + new java.lang.String[] { "PacketGenerator", "InputSidePacket", "ExternalInput", "OutputSidePacket", "ExternalOutput", "Options", }); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/StreamHandler.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/StreamHandler.java new file mode 100644 index 000000000..03b0fccd6 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/StreamHandler.java @@ -0,0 +1,2089 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: mediapipe/framework/stream_handler.proto + +package mediapipe; + +public final class StreamHandler { + private StreamHandler() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface InputStreamHandlerConfigOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.InputStreamHandlerConfig) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Name of the registered input stream handler class.
+     * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + boolean hasInputStreamHandler(); + /** + *
+     * Name of the registered input stream handler class.
+     * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + java.lang.String getInputStreamHandler(); + /** + *
+     * Name of the registered input stream handler class.
+     * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + com.google.protobuf.ByteString + getInputStreamHandlerBytes(); + + /** + *
+     * Options for the input stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + boolean hasOptions(); + /** + *
+     * Options for the input stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + mediapipe.MediapipeOptions.MediaPipeOptions getOptions(); + /** + *
+     * Options for the input stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder getOptionsOrBuilder(); + } + /** + *
+   * Settings specifying an input stream handler.
+   * 
+ * + * Protobuf type {@code mediapipe.InputStreamHandlerConfig} + */ + public static final class InputStreamHandlerConfig extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.InputStreamHandlerConfig) + InputStreamHandlerConfigOrBuilder { + // Use InputStreamHandlerConfig.newBuilder() to construct. + private InputStreamHandlerConfig(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private InputStreamHandlerConfig() { + inputStreamHandler_ = "DefaultInputStreamHandler"; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private InputStreamHandlerConfig( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000001; + inputStreamHandler_ = bs; + break; + } + case 26: { + mediapipe.MediapipeOptions.MediaPipeOptions.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = options_.toBuilder(); + } + options_ = input.readMessage(mediapipe.MediapipeOptions.MediaPipeOptions.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(options_); + options_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw 
e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.StreamHandler.internal_static_mediapipe_InputStreamHandlerConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.StreamHandler.internal_static_mediapipe_InputStreamHandlerConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.StreamHandler.InputStreamHandlerConfig.class, mediapipe.StreamHandler.InputStreamHandlerConfig.Builder.class); + } + + private int bitField0_; + public static final int INPUT_STREAM_HANDLER_FIELD_NUMBER = 1; + private volatile java.lang.Object inputStreamHandler_; + /** + *
+     * Name of the registered input stream handler class.
+     * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + public boolean hasInputStreamHandler() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+     * Name of the registered input stream handler class.
+     * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + public java.lang.String getInputStreamHandler() { + java.lang.Object ref = inputStreamHandler_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + inputStreamHandler_ = s; + } + return s; + } + } + /** + *
+     * Name of the registered input stream handler class.
+     * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + public com.google.protobuf.ByteString + getInputStreamHandlerBytes() { + java.lang.Object ref = inputStreamHandler_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + inputStreamHandler_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int OPTIONS_FIELD_NUMBER = 3; + private mediapipe.MediapipeOptions.MediaPipeOptions options_; + /** + *
+     * Options for the input stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public boolean hasOptions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+     * Options for the input stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptions getOptions() { + return options_ == null ? mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance() : options_; + } + /** + *
+     * Options for the input stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder getOptionsOrBuilder() { + return options_ == null ? mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance() : options_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (hasOptions()) { + if (!getOptions().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputStreamHandler_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(3, getOptions()); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputStreamHandler_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getOptions()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof mediapipe.StreamHandler.InputStreamHandlerConfig)) { + return super.equals(obj); + } + mediapipe.StreamHandler.InputStreamHandlerConfig other = (mediapipe.StreamHandler.InputStreamHandlerConfig) obj; + + boolean result = true; + result = result && (hasInputStreamHandler() == other.hasInputStreamHandler()); + if 
(hasInputStreamHandler()) { + result = result && getInputStreamHandler() + .equals(other.getInputStreamHandler()); + } + result = result && (hasOptions() == other.hasOptions()); + if (hasOptions()) { + result = result && getOptions() + .equals(other.getOptions()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasInputStreamHandler()) { + hash = (37 * hash) + INPUT_STREAM_HANDLER_FIELD_NUMBER; + hash = (53 * hash) + getInputStreamHandler().hashCode(); + } + if (hasOptions()) { + hash = (37 * hash) + OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + getOptions().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseFrom(java.io.InputStream input) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.StreamHandler.InputStreamHandlerConfig parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.StreamHandler.InputStreamHandlerConfig prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * Settings specifying an input stream handler.
+     * 
+ * + * Protobuf type {@code mediapipe.InputStreamHandlerConfig} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.InputStreamHandlerConfig) + mediapipe.StreamHandler.InputStreamHandlerConfigOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.StreamHandler.internal_static_mediapipe_InputStreamHandlerConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.StreamHandler.internal_static_mediapipe_InputStreamHandlerConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.StreamHandler.InputStreamHandlerConfig.class, mediapipe.StreamHandler.InputStreamHandlerConfig.Builder.class); + } + + // Construct using mediapipe.StreamHandler.InputStreamHandlerConfig.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getOptionsFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + inputStreamHandler_ = "DefaultInputStreamHandler"; + bitField0_ = (bitField0_ & ~0x00000001); + if (optionsBuilder_ == null) { + options_ = null; + } else { + optionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.StreamHandler.internal_static_mediapipe_InputStreamHandlerConfig_descriptor; + } + + public mediapipe.StreamHandler.InputStreamHandlerConfig getDefaultInstanceForType() { + return mediapipe.StreamHandler.InputStreamHandlerConfig.getDefaultInstance(); + } + + public 
mediapipe.StreamHandler.InputStreamHandlerConfig build() { + mediapipe.StreamHandler.InputStreamHandlerConfig result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.StreamHandler.InputStreamHandlerConfig buildPartial() { + mediapipe.StreamHandler.InputStreamHandlerConfig result = new mediapipe.StreamHandler.InputStreamHandlerConfig(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.inputStreamHandler_ = inputStreamHandler_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (optionsBuilder_ == null) { + result.options_ = options_; + } else { + result.options_ = optionsBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.StreamHandler.InputStreamHandlerConfig) { + return mergeFrom((mediapipe.StreamHandler.InputStreamHandlerConfig)other); 
+ } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.StreamHandler.InputStreamHandlerConfig other) { + if (other == mediapipe.StreamHandler.InputStreamHandlerConfig.getDefaultInstance()) return this; + if (other.hasInputStreamHandler()) { + bitField0_ |= 0x00000001; + inputStreamHandler_ = other.inputStreamHandler_; + onChanged(); + } + if (other.hasOptions()) { + mergeOptions(other.getOptions()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (hasOptions()) { + if (!getOptions().isInitialized()) { + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.StreamHandler.InputStreamHandlerConfig parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.StreamHandler.InputStreamHandlerConfig) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object inputStreamHandler_ = "DefaultInputStreamHandler"; + /** + *
+       * Name of the registered input stream handler class.
+       * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + public boolean hasInputStreamHandler() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+       * Name of the registered input stream handler class.
+       * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + public java.lang.String getInputStreamHandler() { + java.lang.Object ref = inputStreamHandler_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + inputStreamHandler_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Name of the registered input stream handler class.
+       * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + public com.google.protobuf.ByteString + getInputStreamHandlerBytes() { + java.lang.Object ref = inputStreamHandler_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + inputStreamHandler_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Name of the registered input stream handler class.
+       * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + public Builder setInputStreamHandler( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + inputStreamHandler_ = value; + onChanged(); + return this; + } + /** + *
+       * Name of the registered input stream handler class.
+       * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + public Builder clearInputStreamHandler() { + bitField0_ = (bitField0_ & ~0x00000001); + inputStreamHandler_ = getDefaultInstance().getInputStreamHandler(); + onChanged(); + return this; + } + /** + *
+       * Name of the registered input stream handler class.
+       * 
+ * + * optional string input_stream_handler = 1 [default = "DefaultInputStreamHandler"]; + */ + public Builder setInputStreamHandlerBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + inputStreamHandler_ = value; + onChanged(); + return this; + } + + private mediapipe.MediapipeOptions.MediaPipeOptions options_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + mediapipe.MediapipeOptions.MediaPipeOptions, mediapipe.MediapipeOptions.MediaPipeOptions.Builder, mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder> optionsBuilder_; + /** + *
+       * Options for the input stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public boolean hasOptions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+       * Options for the input stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptions getOptions() { + if (optionsBuilder_ == null) { + return options_ == null ? mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance() : options_; + } else { + return optionsBuilder_.getMessage(); + } + } + /** + *
+       * Options for the input stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public Builder setOptions(mediapipe.MediapipeOptions.MediaPipeOptions value) { + if (optionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + options_ = value; + onChanged(); + } else { + optionsBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + *
+       * Options for the input stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public Builder setOptions( + mediapipe.MediapipeOptions.MediaPipeOptions.Builder builderForValue) { + if (optionsBuilder_ == null) { + options_ = builderForValue.build(); + onChanged(); + } else { + optionsBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + /** + *
+       * Options for the input stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public Builder mergeOptions(mediapipe.MediapipeOptions.MediaPipeOptions value) { + if (optionsBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + options_ != null && + options_ != mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance()) { + options_ = + mediapipe.MediapipeOptions.MediaPipeOptions.newBuilder(options_).mergeFrom(value).buildPartial(); + } else { + options_ = value; + } + onChanged(); + } else { + optionsBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + *
+       * Options for the input stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public Builder clearOptions() { + if (optionsBuilder_ == null) { + options_ = null; + onChanged(); + } else { + optionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + /** + *
+       * Options for the input stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptions.Builder getOptionsBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getOptionsFieldBuilder().getBuilder(); + } + /** + *
+       * Options for the input stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder getOptionsOrBuilder() { + if (optionsBuilder_ != null) { + return optionsBuilder_.getMessageOrBuilder(); + } else { + return options_ == null ? + mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance() : options_; + } + } + /** + *
+       * Options for the input stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + mediapipe.MediapipeOptions.MediaPipeOptions, mediapipe.MediapipeOptions.MediaPipeOptions.Builder, mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder> + getOptionsFieldBuilder() { + if (optionsBuilder_ == null) { + optionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + mediapipe.MediapipeOptions.MediaPipeOptions, mediapipe.MediapipeOptions.MediaPipeOptions.Builder, mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder>( + getOptions(), + getParentForChildren(), + isClean()); + options_ = null; + } + return optionsBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.InputStreamHandlerConfig) + } + + // @@protoc_insertion_point(class_scope:mediapipe.InputStreamHandlerConfig) + private static final mediapipe.StreamHandler.InputStreamHandlerConfig DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.StreamHandler.InputStreamHandlerConfig(); + } + + public static mediapipe.StreamHandler.InputStreamHandlerConfig getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public InputStreamHandlerConfig parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new InputStreamHandlerConfig(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.StreamHandler.InputStreamHandlerConfig getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface OutputStreamHandlerConfigOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.OutputStreamHandlerConfig) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Name of the registered output stream handler class.
+     * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + boolean hasOutputStreamHandler(); + /** + *
+     * Name of the registered output stream handler class.
+     * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + java.lang.String getOutputStreamHandler(); + /** + *
+     * Name of the registered output stream handler class.
+     * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + com.google.protobuf.ByteString + getOutputStreamHandlerBytes(); + + /** + *
+     * Names of the input side packets for the handler specifically and distinct
+     * from the side packets for the calculator (but could be shared).
+     * 
+ * + * repeated string input_side_packet = 2; + */ + java.util.List + getInputSidePacketList(); + /** + *
+     * Names of the input side packets for the handler specifically and distinct
+     * from the side packets for the calculator (but could be shared).
+     * 
+ * + * repeated string input_side_packet = 2; + */ + int getInputSidePacketCount(); + /** + *
+     * Names of the input side packets for the handler specifically and distinct
+     * from the side packets for the calculator (but could be shared).
+     * 
+ * + * repeated string input_side_packet = 2; + */ + java.lang.String getInputSidePacket(int index); + /** + *
+     * Names of the input side packets for the handler specifically and distinct
+     * from the side packets for the calculator (but could be shared).
+     * 
+ * + * repeated string input_side_packet = 2; + */ + com.google.protobuf.ByteString + getInputSidePacketBytes(int index); + + /** + *
+     * Options for the output stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + boolean hasOptions(); + /** + *
+     * Options for the output stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + mediapipe.MediapipeOptions.MediaPipeOptions getOptions(); + /** + *
+     * Options for the output stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder getOptionsOrBuilder(); + } + /** + *
+   * Settings specifying an output stream handler.
+   * 
+ * + * Protobuf type {@code mediapipe.OutputStreamHandlerConfig} + */ + public static final class OutputStreamHandlerConfig extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.OutputStreamHandlerConfig) + OutputStreamHandlerConfigOrBuilder { + // Use OutputStreamHandlerConfig.newBuilder() to construct. + private OutputStreamHandlerConfig(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private OutputStreamHandlerConfig() { + outputStreamHandler_ = "InOrderOutputStreamHandler"; + inputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private OutputStreamHandlerConfig( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000001; + outputStreamHandler_ = bs; + break; + } + case 18: { + com.google.protobuf.ByteString bs = input.readBytes(); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + inputSidePacket_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000002; + } + inputSidePacket_.add(bs); + break; + } + case 26: { + mediapipe.MediapipeOptions.MediaPipeOptions.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = options_.toBuilder(); + } + 
options_ = input.readMessage(mediapipe.MediapipeOptions.MediaPipeOptions.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(options_); + options_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + inputSidePacket_ = inputSidePacket_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.StreamHandler.internal_static_mediapipe_OutputStreamHandlerConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.StreamHandler.internal_static_mediapipe_OutputStreamHandlerConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.StreamHandler.OutputStreamHandlerConfig.class, mediapipe.StreamHandler.OutputStreamHandlerConfig.Builder.class); + } + + private int bitField0_; + public static final int OUTPUT_STREAM_HANDLER_FIELD_NUMBER = 1; + private volatile java.lang.Object outputStreamHandler_; + /** + *
+     * Name of the registered output stream handler class.
+     * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + public boolean hasOutputStreamHandler() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+     * Name of the registered output stream handler class.
+     * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + public java.lang.String getOutputStreamHandler() { + java.lang.Object ref = outputStreamHandler_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + outputStreamHandler_ = s; + } + return s; + } + } + /** + *
+     * Name of the registered output stream handler class.
+     * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + public com.google.protobuf.ByteString + getOutputStreamHandlerBytes() { + java.lang.Object ref = outputStreamHandler_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + outputStreamHandler_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int INPUT_SIDE_PACKET_FIELD_NUMBER = 2; + private com.google.protobuf.LazyStringList inputSidePacket_; + /** + *
+     * Names of the input side packets for the handler specifically and distinct
+     * from the side packets for the calculator (but could be shared).
+     * 
+ * + * repeated string input_side_packet = 2; + */ + public com.google.protobuf.ProtocolStringList + getInputSidePacketList() { + return inputSidePacket_; + } + /** + *
+     * Names of the input side packets for the handler specifically and distinct
+     * from the side packets for the calculator (but could be shared).
+     * 
+ * + * repeated string input_side_packet = 2; + */ + public int getInputSidePacketCount() { + return inputSidePacket_.size(); + } + /** + *
+     * Names of the input side packets for the handler specifically and distinct
+     * from the side packets for the calculator (but could be shared).
+     * 
+ * + * repeated string input_side_packet = 2; + */ + public java.lang.String getInputSidePacket(int index) { + return inputSidePacket_.get(index); + } + /** + *
+     * Names of the input side packets for the handler specifically and distinct
+     * from the side packets for the calculator (but could be shared).
+     * 
+ * + * repeated string input_side_packet = 2; + */ + public com.google.protobuf.ByteString + getInputSidePacketBytes(int index) { + return inputSidePacket_.getByteString(index); + } + + public static final int OPTIONS_FIELD_NUMBER = 3; + private mediapipe.MediapipeOptions.MediaPipeOptions options_; + /** + *
+     * Options for the output stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public boolean hasOptions() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + *
+     * Options for the output stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptions getOptions() { + return options_ == null ? mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance() : options_; + } + /** + *
+     * Options for the output stream handler.
+     * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder getOptionsOrBuilder() { + return options_ == null ? mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance() : options_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (hasOptions()) { + if (!getOptions().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, outputStreamHandler_); + } + for (int i = 0; i < inputSidePacket_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputSidePacket_.getRaw(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(3, getOptions()); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, outputStreamHandler_); + } + { + int dataSize = 0; + for (int i = 0; i < inputSidePacket_.size(); i++) { + dataSize += computeStringSizeNoTag(inputSidePacket_.getRaw(i)); + } + size += dataSize; + size += 1 * getInputSidePacketList().size(); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getOptions()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if 
(obj == this) { + return true; + } + if (!(obj instanceof mediapipe.StreamHandler.OutputStreamHandlerConfig)) { + return super.equals(obj); + } + mediapipe.StreamHandler.OutputStreamHandlerConfig other = (mediapipe.StreamHandler.OutputStreamHandlerConfig) obj; + + boolean result = true; + result = result && (hasOutputStreamHandler() == other.hasOutputStreamHandler()); + if (hasOutputStreamHandler()) { + result = result && getOutputStreamHandler() + .equals(other.getOutputStreamHandler()); + } + result = result && getInputSidePacketList() + .equals(other.getInputSidePacketList()); + result = result && (hasOptions() == other.hasOptions()); + if (hasOptions()) { + result = result && getOptions() + .equals(other.getOptions()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasOutputStreamHandler()) { + hash = (37 * hash) + OUTPUT_STREAM_HANDLER_FIELD_NUMBER; + hash = (53 * hash) + getOutputStreamHandler().hashCode(); + } + if (getInputSidePacketCount() > 0) { + hash = (37 * hash) + INPUT_SIDE_PACKET_FIELD_NUMBER; + hash = (53 * hash) + getInputSidePacketList().hashCode(); + } + if (hasOptions()) { + hash = (37 * hash) + OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + getOptions().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.StreamHandler.OutputStreamHandlerConfig parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.StreamHandler.OutputStreamHandlerConfig prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * Settings specifying an output stream handler.
+     * 
+ * + * Protobuf type {@code mediapipe.OutputStreamHandlerConfig} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.OutputStreamHandlerConfig) + mediapipe.StreamHandler.OutputStreamHandlerConfigOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.StreamHandler.internal_static_mediapipe_OutputStreamHandlerConfig_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.StreamHandler.internal_static_mediapipe_OutputStreamHandlerConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.StreamHandler.OutputStreamHandlerConfig.class, mediapipe.StreamHandler.OutputStreamHandlerConfig.Builder.class); + } + + // Construct using mediapipe.StreamHandler.OutputStreamHandlerConfig.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getOptionsFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + outputStreamHandler_ = "InOrderOutputStreamHandler"; + bitField0_ = (bitField0_ & ~0x00000001); + inputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + if (optionsBuilder_ == null) { + options_ = null; + } else { + optionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.StreamHandler.internal_static_mediapipe_OutputStreamHandlerConfig_descriptor; + } + + public mediapipe.StreamHandler.OutputStreamHandlerConfig 
getDefaultInstanceForType() { + return mediapipe.StreamHandler.OutputStreamHandlerConfig.getDefaultInstance(); + } + + public mediapipe.StreamHandler.OutputStreamHandlerConfig build() { + mediapipe.StreamHandler.OutputStreamHandlerConfig result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.StreamHandler.OutputStreamHandlerConfig buildPartial() { + mediapipe.StreamHandler.OutputStreamHandlerConfig result = new mediapipe.StreamHandler.OutputStreamHandlerConfig(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.outputStreamHandler_ = outputStreamHandler_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + inputSidePacket_ = inputSidePacket_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.inputSidePacket_ = inputSidePacket_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + if (optionsBuilder_ == null) { + result.options_ = options_; + } else { + result.options_ = optionsBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.StreamHandler.OutputStreamHandlerConfig) { + return mergeFrom((mediapipe.StreamHandler.OutputStreamHandlerConfig)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.StreamHandler.OutputStreamHandlerConfig other) { + if (other == mediapipe.StreamHandler.OutputStreamHandlerConfig.getDefaultInstance()) return this; + if (other.hasOutputStreamHandler()) { + bitField0_ |= 0x00000001; + outputStreamHandler_ = other.outputStreamHandler_; + onChanged(); + } + if (!other.inputSidePacket_.isEmpty()) { + if (inputSidePacket_.isEmpty()) { + inputSidePacket_ = other.inputSidePacket_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureInputSidePacketIsMutable(); + inputSidePacket_.addAll(other.inputSidePacket_); + } + onChanged(); + } + if (other.hasOptions()) { + mergeOptions(other.getOptions()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (hasOptions()) { + if (!getOptions().isInitialized()) { + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.StreamHandler.OutputStreamHandlerConfig parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.StreamHandler.OutputStreamHandlerConfig) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object 
outputStreamHandler_ = "InOrderOutputStreamHandler"; + /** + *
+       * Name of the registered output stream handler class.
+       * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + public boolean hasOutputStreamHandler() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + *
+       * Name of the registered output stream handler class.
+       * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + public java.lang.String getOutputStreamHandler() { + java.lang.Object ref = outputStreamHandler_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + outputStreamHandler_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Name of the registered output stream handler class.
+       * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + public com.google.protobuf.ByteString + getOutputStreamHandlerBytes() { + java.lang.Object ref = outputStreamHandler_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + outputStreamHandler_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Name of the registered output stream handler class.
+       * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + public Builder setOutputStreamHandler( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + outputStreamHandler_ = value; + onChanged(); + return this; + } + /** + *
+       * Name of the registered output stream handler class.
+       * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + public Builder clearOutputStreamHandler() { + bitField0_ = (bitField0_ & ~0x00000001); + outputStreamHandler_ = getDefaultInstance().getOutputStreamHandler(); + onChanged(); + return this; + } + /** + *
+       * Name of the registered output stream handler class.
+       * 
+ * + * optional string output_stream_handler = 1 [default = "InOrderOutputStreamHandler"]; + */ + public Builder setOutputStreamHandlerBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + outputStreamHandler_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList inputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureInputSidePacketIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + inputSidePacket_ = new com.google.protobuf.LazyStringArrayList(inputSidePacket_); + bitField0_ |= 0x00000002; + } + } + /** + *
+       * Names of the input side packets for the handler specifically and distinct
+       * from the side packets for the calculator (but could be shared).
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public com.google.protobuf.ProtocolStringList + getInputSidePacketList() { + return inputSidePacket_.getUnmodifiableView(); + } + /** + *
+       * Names of the input side packets for the handler specifically and distinct
+       * from the side packets for the calculator (but could be shared).
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public int getInputSidePacketCount() { + return inputSidePacket_.size(); + } + /** + *
+       * Names of the input side packets for the handler specifically and distinct
+       * from the side packets for the calculator (but could be shared).
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public java.lang.String getInputSidePacket(int index) { + return inputSidePacket_.get(index); + } + /** + *
+       * Names of the input side packets for the handler specifically and distinct
+       * from the side packets for the calculator (but could be shared).
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public com.google.protobuf.ByteString + getInputSidePacketBytes(int index) { + return inputSidePacket_.getByteString(index); + } + /** + *
+       * Names of the input side packets for the handler specifically and distinct
+       * from the side packets for the calculator (but could be shared).
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder setInputSidePacket( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputSidePacketIsMutable(); + inputSidePacket_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * Names of the input side packets for the handler specifically and distinct
+       * from the side packets for the calculator (but could be shared).
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder addInputSidePacket( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputSidePacketIsMutable(); + inputSidePacket_.add(value); + onChanged(); + return this; + } + /** + *
+       * Names of the input side packets for the handler specifically and distinct
+       * from the side packets for the calculator (but could be shared).
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder addAllInputSidePacket( + java.lang.Iterable values) { + ensureInputSidePacketIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, inputSidePacket_); + onChanged(); + return this; + } + /** + *
+       * Names of the input side packets for the handler specifically and distinct
+       * from the side packets for the calculator (but could be shared).
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder clearInputSidePacket() { + inputSidePacket_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       * Names of the input side packets for the handler specifically and distinct
+       * from the side packets for the calculator (but could be shared).
+       * 
+ * + * repeated string input_side_packet = 2; + */ + public Builder addInputSidePacketBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputSidePacketIsMutable(); + inputSidePacket_.add(value); + onChanged(); + return this; + } + + private mediapipe.MediapipeOptions.MediaPipeOptions options_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + mediapipe.MediapipeOptions.MediaPipeOptions, mediapipe.MediapipeOptions.MediaPipeOptions.Builder, mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder> optionsBuilder_; + /** + *
+       * Options for the output stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public boolean hasOptions() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + *
+       * Options for the output stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptions getOptions() { + if (optionsBuilder_ == null) { + return options_ == null ? mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance() : options_; + } else { + return optionsBuilder_.getMessage(); + } + } + /** + *
+       * Options for the output stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public Builder setOptions(mediapipe.MediapipeOptions.MediaPipeOptions value) { + if (optionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + options_ = value; + onChanged(); + } else { + optionsBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + return this; + } + /** + *
+       * Options for the output stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public Builder setOptions( + mediapipe.MediapipeOptions.MediaPipeOptions.Builder builderForValue) { + if (optionsBuilder_ == null) { + options_ = builderForValue.build(); + onChanged(); + } else { + optionsBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + return this; + } + /** + *
+       * Options for the output stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public Builder mergeOptions(mediapipe.MediapipeOptions.MediaPipeOptions value) { + if (optionsBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004) && + options_ != null && + options_ != mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance()) { + options_ = + mediapipe.MediapipeOptions.MediaPipeOptions.newBuilder(options_).mergeFrom(value).buildPartial(); + } else { + options_ = value; + } + onChanged(); + } else { + optionsBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + return this; + } + /** + *
+       * Options for the output stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public Builder clearOptions() { + if (optionsBuilder_ == null) { + options_ = null; + onChanged(); + } else { + optionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + /** + *
+       * Options for the output stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptions.Builder getOptionsBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getOptionsFieldBuilder().getBuilder(); + } + /** + *
+       * Options for the output stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + public mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder getOptionsOrBuilder() { + if (optionsBuilder_ != null) { + return optionsBuilder_.getMessageOrBuilder(); + } else { + return options_ == null ? + mediapipe.MediapipeOptions.MediaPipeOptions.getDefaultInstance() : options_; + } + } + /** + *
+       * Options for the output stream handler.
+       * 
+ * + * optional .mediapipe.MediaPipeOptions options = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + mediapipe.MediapipeOptions.MediaPipeOptions, mediapipe.MediapipeOptions.MediaPipeOptions.Builder, mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder> + getOptionsFieldBuilder() { + if (optionsBuilder_ == null) { + optionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + mediapipe.MediapipeOptions.MediaPipeOptions, mediapipe.MediapipeOptions.MediaPipeOptions.Builder, mediapipe.MediapipeOptions.MediaPipeOptionsOrBuilder>( + getOptions(), + getParentForChildren(), + isClean()); + options_ = null; + } + return optionsBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.OutputStreamHandlerConfig) + } + + // @@protoc_insertion_point(class_scope:mediapipe.OutputStreamHandlerConfig) + private static final mediapipe.StreamHandler.OutputStreamHandlerConfig DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.StreamHandler.OutputStreamHandlerConfig(); + } + + public static mediapipe.StreamHandler.OutputStreamHandlerConfig getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OutputStreamHandlerConfig parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OutputStreamHandlerConfig(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.StreamHandler.OutputStreamHandlerConfig getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_InputStreamHandlerConfig_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_InputStreamHandlerConfig_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_OutputStreamHandlerConfig_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_OutputStreamHandlerConfig_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n(mediapipe/framework/stream_handler.pro" + + "to\022\tmediapipe\032+mediapipe/framework/media" + + "pipe_options.proto\"\201\001\n\030InputStreamHandle" + + "rConfig\0227\n\024input_stream_handler\030\001 \001(\t:\031D" + + "efaultInputStreamHandler\022,\n\007options\030\003 \001(" + + "\0132\033.mediapipe.MediaPipeOptions\"\237\001\n\031Outpu" + + "tStreamHandlerConfig\0229\n\025output_stream_ha" + + "ndler\030\001 \001(\t:\032InOrderOutputStreamHandler\022" + + "\031\n\021input_side_packet\030\002 \003(\t\022,\n\007options\030\003 " + + "\001(\0132\033.mediapipe.MediaPipeOptions" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + mediapipe.MediapipeOptions.getDescriptor(), + }, assigner); + internal_static_mediapipe_InputStreamHandlerConfig_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_mediapipe_InputStreamHandlerConfig_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_InputStreamHandlerConfig_descriptor, + new java.lang.String[] { "InputStreamHandler", "Options", }); + internal_static_mediapipe_OutputStreamHandlerConfig_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_mediapipe_OutputStreamHandlerConfig_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_OutputStreamHandlerConfig_descriptor, + new java.lang.String[] { "OutputStreamHandler", "InputSidePacket", "Options", }); + mediapipe.MediapipeOptions.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/TestCalculators.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/TestCalculators.java new file mode 100644 index 000000000..7a5d45fc5 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/proto/TestCalculators.java @@ -0,0 +1,894 @@ +// Generated by the protocol buffer compiler. 
DO NOT EDIT! +// source: mediapipe/framework/test_calculators.proto + +package mediapipe; + +public final class TestCalculators { + private TestCalculators() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + registry.add(mediapipe.TestCalculators.RandomMatrixCalculatorOptions.ext); + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface RandomMatrixCalculatorOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:mediapipe.RandomMatrixCalculatorOptions) + com.google.protobuf.MessageOrBuilder { + + /** + * optional int32 rows = 1; + */ + boolean hasRows(); + /** + * optional int32 rows = 1; + */ + int getRows(); + + /** + * optional int32 cols = 2; + */ + boolean hasCols(); + /** + * optional int32 cols = 2; + */ + int getCols(); + + /** + * optional int64 start_timestamp = 3; + */ + boolean hasStartTimestamp(); + /** + * optional int64 start_timestamp = 3; + */ + long getStartTimestamp(); + + /** + * optional int64 limit_timestamp = 4; + */ + boolean hasLimitTimestamp(); + /** + * optional int64 limit_timestamp = 4; + */ + long getLimitTimestamp(); + + /** + * optional int64 timestamp_step = 5; + */ + boolean hasTimestampStep(); + /** + * optional int64 timestamp_step = 5; + */ + long getTimestampStep(); + } + /** + * Protobuf type {@code mediapipe.RandomMatrixCalculatorOptions} + */ + public static final class RandomMatrixCalculatorOptions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:mediapipe.RandomMatrixCalculatorOptions) + RandomMatrixCalculatorOptionsOrBuilder { + // Use RandomMatrixCalculatorOptions.newBuilder() to construct. 
+ private RandomMatrixCalculatorOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private RandomMatrixCalculatorOptions() { + rows_ = 0; + cols_ = 0; + startTimestamp_ = 0L; + limitTimestamp_ = 0L; + timestampStep_ = 0L; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RandomMatrixCalculatorOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + rows_ = input.readInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + cols_ = input.readInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + startTimestamp_ = input.readInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + limitTimestamp_ = input.readInt64(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + timestampStep_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor; + } + + 
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.TestCalculators.RandomMatrixCalculatorOptions.class, mediapipe.TestCalculators.RandomMatrixCalculatorOptions.Builder.class); + } + + private int bitField0_; + public static final int ROWS_FIELD_NUMBER = 1; + private int rows_; + /** + * optional int32 rows = 1; + */ + public boolean hasRows() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int32 rows = 1; + */ + public int getRows() { + return rows_; + } + + public static final int COLS_FIELD_NUMBER = 2; + private int cols_; + /** + * optional int32 cols = 2; + */ + public boolean hasCols() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int32 cols = 2; + */ + public int getCols() { + return cols_; + } + + public static final int START_TIMESTAMP_FIELD_NUMBER = 3; + private long startTimestamp_; + /** + * optional int64 start_timestamp = 3; + */ + public boolean hasStartTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional int64 start_timestamp = 3; + */ + public long getStartTimestamp() { + return startTimestamp_; + } + + public static final int LIMIT_TIMESTAMP_FIELD_NUMBER = 4; + private long limitTimestamp_; + /** + * optional int64 limit_timestamp = 4; + */ + public boolean hasLimitTimestamp() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional int64 limit_timestamp = 4; + */ + public long getLimitTimestamp() { + return limitTimestamp_; + } + + public static final int TIMESTAMP_STEP_FIELD_NUMBER = 5; + private long timestampStep_; + /** + * optional int64 timestamp_step = 5; + */ + public boolean hasTimestampStep() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int64 timestamp_step = 5; + */ + public long 
getTimestampStep() { + return timestampStep_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt32(1, rows_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt32(2, cols_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeInt64(3, startTimestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeInt64(4, limitTimestamp_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeInt64(5, timestampStep_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, rows_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, cols_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, startTimestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(4, limitTimestamp_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(5, timestampStep_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
mediapipe.TestCalculators.RandomMatrixCalculatorOptions)) { + return super.equals(obj); + } + mediapipe.TestCalculators.RandomMatrixCalculatorOptions other = (mediapipe.TestCalculators.RandomMatrixCalculatorOptions) obj; + + boolean result = true; + result = result && (hasRows() == other.hasRows()); + if (hasRows()) { + result = result && (getRows() + == other.getRows()); + } + result = result && (hasCols() == other.hasCols()); + if (hasCols()) { + result = result && (getCols() + == other.getCols()); + } + result = result && (hasStartTimestamp() == other.hasStartTimestamp()); + if (hasStartTimestamp()) { + result = result && (getStartTimestamp() + == other.getStartTimestamp()); + } + result = result && (hasLimitTimestamp() == other.hasLimitTimestamp()); + if (hasLimitTimestamp()) { + result = result && (getLimitTimestamp() + == other.getLimitTimestamp()); + } + result = result && (hasTimestampStep() == other.hasTimestampStep()); + if (hasTimestampStep()) { + result = result && (getTimestampStep() + == other.getTimestampStep()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRows()) { + hash = (37 * hash) + ROWS_FIELD_NUMBER; + hash = (53 * hash) + getRows(); + } + if (hasCols()) { + hash = (37 * hash) + COLS_FIELD_NUMBER; + hash = (53 * hash) + getCols(); + } + if (hasStartTimestamp()) { + hash = (37 * hash) + START_TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStartTimestamp()); + } + if (hasLimitTimestamp()) { + hash = (37 * hash) + LIMIT_TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLimitTimestamp()); + } + if (hasTimestampStep()) { + hash = (37 * hash) + TIMESTAMP_STEP_FIELD_NUMBER; + hash = (53 * hash) + 
com.google.protobuf.Internal.hashLong( + getTimestampStep()); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static 
mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(mediapipe.TestCalculators.RandomMatrixCalculatorOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code mediapipe.RandomMatrixCalculatorOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:mediapipe.RandomMatrixCalculatorOptions) + mediapipe.TestCalculators.RandomMatrixCalculatorOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + mediapipe.TestCalculators.RandomMatrixCalculatorOptions.class, mediapipe.TestCalculators.RandomMatrixCalculatorOptions.Builder.class); + } + + // Construct using mediapipe.TestCalculators.RandomMatrixCalculatorOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + rows_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + cols_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + startTimestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + limitTimestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + timestampStep_ = 0L; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public 
com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor; + } + + public mediapipe.TestCalculators.RandomMatrixCalculatorOptions getDefaultInstanceForType() { + return mediapipe.TestCalculators.RandomMatrixCalculatorOptions.getDefaultInstance(); + } + + public mediapipe.TestCalculators.RandomMatrixCalculatorOptions build() { + mediapipe.TestCalculators.RandomMatrixCalculatorOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public mediapipe.TestCalculators.RandomMatrixCalculatorOptions buildPartial() { + mediapipe.TestCalculators.RandomMatrixCalculatorOptions result = new mediapipe.TestCalculators.RandomMatrixCalculatorOptions(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.rows_ = rows_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.cols_ = cols_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.startTimestamp_ = startTimestamp_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.limitTimestamp_ = limitTimestamp_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.timestampStep_ = timestampStep_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder 
clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof mediapipe.TestCalculators.RandomMatrixCalculatorOptions) { + return mergeFrom((mediapipe.TestCalculators.RandomMatrixCalculatorOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(mediapipe.TestCalculators.RandomMatrixCalculatorOptions other) { + if (other == mediapipe.TestCalculators.RandomMatrixCalculatorOptions.getDefaultInstance()) return this; + if (other.hasRows()) { + setRows(other.getRows()); + } + if (other.hasCols()) { + setCols(other.getCols()); + } + if (other.hasStartTimestamp()) { + setStartTimestamp(other.getStartTimestamp()); + } + if (other.hasLimitTimestamp()) { + setLimitTimestamp(other.getLimitTimestamp()); + } + if (other.hasTimestampStep()) { + setTimestampStep(other.getTimestampStep()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + mediapipe.TestCalculators.RandomMatrixCalculatorOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (mediapipe.TestCalculators.RandomMatrixCalculatorOptions) e.getUnfinishedMessage(); + throw 
e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private int rows_ ; + /** + * optional int32 rows = 1; + */ + public boolean hasRows() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int32 rows = 1; + */ + public int getRows() { + return rows_; + } + /** + * optional int32 rows = 1; + */ + public Builder setRows(int value) { + bitField0_ |= 0x00000001; + rows_ = value; + onChanged(); + return this; + } + /** + * optional int32 rows = 1; + */ + public Builder clearRows() { + bitField0_ = (bitField0_ & ~0x00000001); + rows_ = 0; + onChanged(); + return this; + } + + private int cols_ ; + /** + * optional int32 cols = 2; + */ + public boolean hasCols() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int32 cols = 2; + */ + public int getCols() { + return cols_; + } + /** + * optional int32 cols = 2; + */ + public Builder setCols(int value) { + bitField0_ |= 0x00000002; + cols_ = value; + onChanged(); + return this; + } + /** + * optional int32 cols = 2; + */ + public Builder clearCols() { + bitField0_ = (bitField0_ & ~0x00000002); + cols_ = 0; + onChanged(); + return this; + } + + private long startTimestamp_ ; + /** + * optional int64 start_timestamp = 3; + */ + public boolean hasStartTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional int64 start_timestamp = 3; + */ + public long getStartTimestamp() { + return startTimestamp_; + } + /** + * optional int64 start_timestamp = 3; + */ + public Builder setStartTimestamp(long value) { + bitField0_ |= 0x00000004; + startTimestamp_ = value; + onChanged(); + return this; + } + /** + * optional int64 start_timestamp = 3; + */ + public Builder clearStartTimestamp() { + bitField0_ = (bitField0_ & ~0x00000004); + startTimestamp_ = 0L; + onChanged(); + return this; + } + + private long limitTimestamp_ ; + /** + * optional int64 
limit_timestamp = 4; + */ + public boolean hasLimitTimestamp() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional int64 limit_timestamp = 4; + */ + public long getLimitTimestamp() { + return limitTimestamp_; + } + /** + * optional int64 limit_timestamp = 4; + */ + public Builder setLimitTimestamp(long value) { + bitField0_ |= 0x00000008; + limitTimestamp_ = value; + onChanged(); + return this; + } + /** + * optional int64 limit_timestamp = 4; + */ + public Builder clearLimitTimestamp() { + bitField0_ = (bitField0_ & ~0x00000008); + limitTimestamp_ = 0L; + onChanged(); + return this; + } + + private long timestampStep_ ; + /** + * optional int64 timestamp_step = 5; + */ + public boolean hasTimestampStep() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int64 timestamp_step = 5; + */ + public long getTimestampStep() { + return timestampStep_; + } + /** + * optional int64 timestamp_step = 5; + */ + public Builder setTimestampStep(long value) { + bitField0_ |= 0x00000010; + timestampStep_ = value; + onChanged(); + return this; + } + /** + * optional int64 timestamp_step = 5; + */ + public Builder clearTimestampStep() { + bitField0_ = (bitField0_ & ~0x00000010); + timestampStep_ = 0L; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:mediapipe.RandomMatrixCalculatorOptions) + } + + // @@protoc_insertion_point(class_scope:mediapipe.RandomMatrixCalculatorOptions) + private static final mediapipe.TestCalculators.RandomMatrixCalculatorOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new mediapipe.TestCalculators.RandomMatrixCalculatorOptions(); + } + + public static 
mediapipe.TestCalculators.RandomMatrixCalculatorOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RandomMatrixCalculatorOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RandomMatrixCalculatorOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public mediapipe.TestCalculators.RandomMatrixCalculatorOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + public static final int EXT_FIELD_NUMBER = 52056136; + /** + * extend .mediapipe.CalculatorOptions { ... } + */ + public static final + com.google.protobuf.GeneratedMessage.GeneratedExtension< + com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions, + mediapipe.TestCalculators.RandomMatrixCalculatorOptions> ext = com.google.protobuf.GeneratedMessage + .newMessageScopedGeneratedExtension( + mediapipe.TestCalculators.RandomMatrixCalculatorOptions.getDefaultInstance(), + 0, + mediapipe.TestCalculators.RandomMatrixCalculatorOptions.class, + mediapipe.TestCalculators.RandomMatrixCalculatorOptions.getDefaultInstance()); + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_mediapipe_RandomMatrixCalculatorOptions_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + 
java.lang.String[] descriptorData = { + "\n*mediapipe/framework/test_calculators.p" + + "roto\022\tmediapipe\032$mediapipe/framework/cal" + + "culator.proto\"\335\001\n\035RandomMatrixCalculator" + + "Options\022\014\n\004rows\030\001 \001(\005\022\014\n\004cols\030\002 \001(\005\022\027\n\017s" + + "tart_timestamp\030\003 \001(\003\022\027\n\017limit_timestamp\030" + + "\004 \001(\003\022\026\n\016timestamp_step\030\005 \001(\0032V\n\003ext\022\034.m" + + "ediapipe.CalculatorOptions\030\310\240\351\030 \001(\0132(.me" + + "diapipe.RandomMatrixCalculatorOptions" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.mediapipe.proto.CalculatorProto.getDescriptor(), + }, assigner); + internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_mediapipe_RandomMatrixCalculatorOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor, + new java.lang.String[] { "Rows", "Cols", "StartTimestamp", "LimitTimestamp", "TimestampStep", }); + com.google.mediapipe.proto.CalculatorProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/layout/activity_main.xml 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/layout/activity_main.xml new file mode 100644 index 000000000..ec0847368 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/layout/activity_main.xml @@ -0,0 +1,21 @@ + + + + + + + \ No newline at end of file diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/values/colors.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/values/colors.xml new file mode 100644 index 000000000..69b22338c --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/values/colors.xml @@ -0,0 +1,6 @@ + + + #008577 + #00574B + #D81B60 + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/values/strings.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/values/strings.xml new file mode 100644 index 000000000..6fbeee92b --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/values/strings.xml @@ -0,0 +1,5 @@ + + Hand Tracking GPU + Please grant camera permissions. 
+ + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/values/styles.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/values/styles.xml new file mode 100644 index 000000000..5885930df --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/res/values/styles.xml @@ -0,0 +1,11 @@ + + + + + + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/build.gradle b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/build.gradle new file mode 100644 index 000000000..8951a5a78 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/build.gradle @@ -0,0 +1,29 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. 
+ +buildscript { + repositories { + google() + jcenter() + + } + dependencies { + classpath 'com.android.tools.build:gradle:3.5.1' + classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.6' + + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + google() + jcenter() + + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradle.properties b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradle.properties new file mode 100644 index 000000000..199d16ede --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradle.properties @@ -0,0 +1,20 @@ +# Project-wide Gradle settings. +# IDE (e.g. Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +org.gradle.jvmargs=-Xmx1536m +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. 
More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +# org.gradle.parallel=true +# AndroidX package structure to make it clearer which packages are bundled with the +# Android operating system, and which are packaged with your app's APK +# https://developer.android.com/topic/libraries/support-library/androidx-rn +android.useAndroidX=true +# Automatically convert third-party libraries to use AndroidX +android.enableJetifier=true + diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradle/wrapper/gradle-wrapper.jar b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 000000000..f6b961fd5 Binary files /dev/null and b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradle/wrapper/gradle-wrapper.jar differ diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradle/wrapper/gradle-wrapper.properties b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 000000000..a515914e4 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Wed Oct 16 15:26:36 PKT 2019 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradlew 
b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradlew new file mode 100644 index 000000000..cccdd3d51 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradlew @@ -0,0 +1,172 @@ +#!/usr/bin/env sh + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! 
-x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ 
$CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradlew.bat b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradlew.bat new file mode 100644 index 000000000..e95643d6a --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem 
########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. 
+set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/local.properties b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/local.properties new file mode 100644 index 000000000..c0e209203 --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/local.properties @@ -0,0 +1,10 @@ +## This file is automatically generated by Android Studio. +# Do not modify this file -- YOUR CHANGES WILL BE ERASED! +# +# This file should *NOT* be checked into Version Control Systems, +# as it contains information specific to your local configuration. +# +# Location of the SDK. This is only used by Gradle. +# For customization when using a Version Control System, please read the +# header note. 
+sdk.dir=/home/ali/Android/Sdk diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/settings.gradle b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/settings.gradle new file mode 100644 index 000000000..ec8d3674d --- /dev/null +++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/settings.gradle @@ -0,0 +1,2 @@ +include ':app' +rootProject.name='Hand Tracking GPU'