diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/Hand Tracking GPU.iml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/Hand Tracking GPU.iml
new file mode 100644
index 000000000..350ccf071
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/Hand Tracking GPU.iml
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/app.iml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/app.iml
new file mode 100644
index 000000000..3626cdf92
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/app.iml
@@ -0,0 +1,157 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ generateDebugSources
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/build.gradle b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/build.gradle
new file mode 100644
index 000000000..72467514a
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/build.gradle
@@ -0,0 +1,53 @@
+apply plugin: 'com.android.application'
+
+android {
+ compileSdkVersion 29
+ buildToolsVersion "29.0.2"
+ defaultConfig {
+ applicationId "com.example.handtrackinggpu"
+ minSdkVersion 21
+ targetSdkVersion 29
+ versionCode 1
+ versionName "1.0"
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+
+ }
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+ }
+ }
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+}
+
+dependencies {
+ implementation fileTree(dir: 'libs', include: ['*.jar'])
+ implementation 'androidx.appcompat:appcompat:1.0.2'
+ implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
+ testImplementation 'junit:junit:4.12'
+ androidTestImplementation 'androidx.test.ext:junit:1.1.0'
+ androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1'
+
+ implementation "com.google.guava:guava:28.1-jre"
+ implementation "com.google.flogger:flogger:0.4"
+ implementation 'com.google.protobuf:protobuf-java:3.5.1'
+
+ compileOnly 'org.glassfish:javax.annotation:10.0-b28'
+ implementation 'androidx.annotation:annotation:1.1.0'
+ implementation 'org.jetbrains:annotations:15.0'
+
+// implementation 'com.intellij:annotations:+@jar'
+
+ implementation "androidx.camera:camera-core:1.0.0-alpha06"
+ // If you want to use Camera2 extensions
+ implementation "androidx.camera:camera-camera2:1.0.0-alpha06"
+    // If you want to use the Camera View class
+ implementation "androidx.camera:camera-view:1.0.0-alpha03"
+    // If you want to use Camera Extensions
+ implementation "androidx.camera:camera-extensions:1.0.0-alpha03"
+
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/proguard-rules.pro b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/proguard-rules.pro
new file mode 100644
index 000000000..f1b424510
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/AndroidManifest.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/AndroidManifest.xml
new file mode 100644
index 000000000..7e5e3ad05
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/AndroidManifest.xml
@@ -0,0 +1,29 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/example/handtrackinggpu/MainActivity.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/example/handtrackinggpu/MainActivity.java
new file mode 100644
index 000000000..ff9e9c65d
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/example/handtrackinggpu/MainActivity.java
@@ -0,0 +1,173 @@
+package com.example.handtrackinggpu;
+
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+
+import android.graphics.SurfaceTexture;
+import android.os.Build;
+import android.os.Bundle;
+import android.util.Size;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+
+import androidx.annotation.RequiresApi;
+import androidx.appcompat.app.AppCompatActivity;
+
+import com.google.mediapipe.components.CameraHelper;
+import com.google.mediapipe.components.CameraXPreviewHelper;
+import com.google.mediapipe.components.ExternalTextureConverter;
+import com.google.mediapipe.components.FrameProcessor;
+import com.google.mediapipe.components.PermissionHelper;
+import com.google.mediapipe.framework.AndroidAssetUtil;
+import com.google.mediapipe.glutil.EglManager;
+
+/** Main activity of MediaPipe example apps. */
+public class MainActivity extends AppCompatActivity {
+ private static final String TAG = "MainActivity";
+
+ private static final String BINARY_GRAPH_NAME = "handtrackinggpu.binarypb";
+ private static final String INPUT_VIDEO_STREAM_NAME = "input_video";
+ private static final String OUTPUT_VIDEO_STREAM_NAME = "output_video";
+ private static final CameraHelper.CameraFacing CAMERA_FACING = CameraHelper.CameraFacing.FRONT;
+
+ // Flips the camera-preview frames vertically before sending them into FrameProcessor to be
+ // processed in a MediaPipe graph, and flips the processed frames back when they are displayed.
+ // This is needed because OpenGL represents images assuming the image origin is at the bottom-left
+ // corner, whereas MediaPipe in general assumes the image origin is at top-left.
+ private static final boolean FLIP_FRAMES_VERTICALLY = true;
+
+ static {
+ // Load all native libraries needed by the app.
+ System.loadLibrary("mediapipe_jni");
+ System.loadLibrary("opencv_java4");
+ }
+
+ // {@link SurfaceTexture} where the camera-preview frames can be accessed.
+ private SurfaceTexture previewFrameTexture;
+ // {@link SurfaceView} that displays the camera-preview frames processed by a MediaPipe graph.
+ private SurfaceView previewDisplayView;
+
+ // Creates and manages an {@link EGLContext}.
+ private EglManager eglManager;
+ // Sends camera-preview frames into a MediaPipe graph for processing, and displays the processed
+ // frames onto a {@link Surface}.
+ private FrameProcessor processor;
+ // Converts the GL_TEXTURE_EXTERNAL_OES texture from Android camera into a regular texture to be
+ // consumed by {@link FrameProcessor} and the underlying MediaPipe graph.
+ private ExternalTextureConverter converter;
+
+ // Handles camera access via the {@link CameraX} Jetpack support library.
+ private CameraXPreviewHelper cameraHelper;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_main);
+
+ previewDisplayView = new SurfaceView(this);
+ setupPreviewDisplayView();
+
+ // Initialize asset manager so that MediaPipe native libraries can access the app assets, e.g.,
+ // binary graphs.
+ AndroidAssetUtil.initializeNativeAssetManager(this);
+
+ eglManager = new EglManager(null);
+ processor =
+ new FrameProcessor(
+ this,
+ eglManager.getNativeContext(),
+ BINARY_GRAPH_NAME,
+ INPUT_VIDEO_STREAM_NAME,
+ OUTPUT_VIDEO_STREAM_NAME);
+ processor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY);
+
+ PermissionHelper.checkAndRequestCameraPermissions(this);
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+ converter = new ExternalTextureConverter(eglManager.getContext());
+ converter.setFlipY(FLIP_FRAMES_VERTICALLY);
+ converter.setConsumer(processor);
+ if (PermissionHelper.cameraPermissionsGranted(this)) {
+ startCamera();
+ }
+ }
+
+ @Override
+ protected void onPause() {
+ super.onPause();
+ converter.close();
+ }
+
+ @Override
+ public void onRequestPermissionsResult(
+ int requestCode, String[] permissions, int[] grantResults) {
+ super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+ PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults);
+ }
+
+ private void setupPreviewDisplayView() {
+ previewDisplayView.setVisibility(View.GONE);
+ ViewGroup viewGroup = findViewById(R.id.preview_display_layout);
+ viewGroup.addView(previewDisplayView);
+
+ previewDisplayView
+ .getHolder()
+ .addCallback(
+ new SurfaceHolder.Callback() {
+ @Override
+ public void surfaceCreated(SurfaceHolder holder) {
+ processor.getVideoSurfaceOutput().setSurface(holder.getSurface());
+ }
+
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+ // (Re-)Compute the ideal size of the camera-preview display (the area that the
+ // camera-preview frames get rendered onto, potentially with scaling and rotation)
+ // based on the size of the SurfaceView that contains the display.
+ Size viewSize = new Size(width, height);
+ Size displaySize = cameraHelper.computeDisplaySizeFromViewSize(viewSize);
+
+ // Connect the converter to the camera-preview frames as its input (via
+ // previewFrameTexture), and configure the output width and height as the computed
+ // display size.
+ converter.setSurfaceTextureAndAttachToGLContext(
+ previewFrameTexture, displaySize.getWidth(), displaySize.getHeight());
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ processor.getVideoSurfaceOutput().setSurface(null);
+ }
+ });
+ }
+
+ private void startCamera() {
+ cameraHelper = new CameraXPreviewHelper();
+ cameraHelper.setOnCameraStartedListener(
+ surfaceTexture -> {
+ previewFrameTexture = surfaceTexture;
+ // Make the display view visible to start showing the preview. This triggers the
+ // SurfaceHolder.Callback added to (the holder of) previewDisplayView.
+ previewDisplayView.setVisibility(View.VISIBLE);
+ });
+ cameraHelper.startCamera(this, CAMERA_FACING, /*surfaceTexture=*/ null);
+ }
+}
\ No newline at end of file
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/BUILD
new file mode 100644
index 000000000..80b65e3d4
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/BUILD
@@ -0,0 +1,70 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+licenses(["notice"]) # Apache 2.0
+
+android_library(
+ name = "android_components",
+ srcs = glob(
+ ["*.java"],
+ exclude = [
+ "CameraHelper.java",
+ "CameraXPreviewHelper.java",
+ ],
+ ),
+ visibility = ["//visibility:public"],
+ deps = [
+ "//mediapipe/java/com/google/mediapipe/framework:android_framework",
+ "//mediapipe/java/com/google/mediapipe/glutil",
+ "//third_party:androidx_appcompat",
+ "//third_party:androidx_core",
+ "//third_party:androidx_legacy_support_v4",
+ "//third_party:androidx_recyclerview",
+ "@com_google_code_findbugs//jar",
+ "@com_google_guava_android//jar",
+ ],
+)
+
+# Note: We need to separate the camera helper files in a different BUILD target because CameraX has a minimum Android API
+# requirement of API 21. Users of android_components may have different API dependencies.
+android_library(
+ name = "android_camerax_helper",
+ srcs = [
+ "CameraHelper.java",
+ "CameraXPreviewHelper.java",
+ ],
+ visibility = ["//visibility:public"],
+ deps = [
+ "//third_party:androidx_appcompat",
+ "//third_party:androidx_legacy_support_v4",
+ "//third_party:camera2",
+ "//third_party:camerax_core",
+ "@androidx_concurrent_futures//jar",
+ "@androidx_lifecycle//jar",
+ "@com_google_code_findbugs//jar",
+ "@com_google_guava_android//jar",
+ ],
+)
+
+android_library(
+ name = "android_microphone_helper",
+ srcs = [
+ "MicrophoneHelper.java",
+ ],
+ visibility = ["//visibility:public"],
+ deps = [
+ "@com_google_code_findbugs//jar",
+ "@com_google_guava_android//jar",
+ ],
+)
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraHelper.java
new file mode 100644
index 000000000..980ad8754
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraHelper.java
@@ -0,0 +1,63 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.components;
+
+import android.app.Activity;
+import android.graphics.SurfaceTexture;
+import android.util.Size;
+import javax.annotation.Nullable;
+
+/** Abstract interface for a helper class that manages camera access. */
+public abstract class CameraHelper {
+ /** The listener is called when camera start is complete. */
+ public interface OnCameraStartedListener {
+ /**
+ * Called when camera start is complete and the camera-preview frames can be accessed from the
+ * surfaceTexture. The surfaceTexture can be null if it is not prepared by the CameraHelper.
+ */
+ public void onCameraStarted(@Nullable SurfaceTexture surfaceTexture);
+ }
+
+ protected static final String TAG = "CameraHelper";
+
+ /** Represents the direction the camera faces relative to device screen. */
+ public static enum CameraFacing {
+ FRONT,
+ BACK
+ };
+
+ protected OnCameraStartedListener onCameraStartedListener;
+
+ protected CameraFacing cameraFacing;
+
+ /**
+ * Initializes the camera and sets it up for accessing frames from a custom SurfaceTexture object.
+ * The SurfaceTexture object can be null when it is the CameraHelper that prepares a
+ * SurfaceTexture object for grabbing frames.
+ */
+ public abstract void startCamera(
+ Activity context, CameraFacing cameraFacing, @Nullable SurfaceTexture surfaceTexture);
+
+ /**
+ * Computes the ideal size of the camera-preview display (the area that the camera-preview frames
+ * get rendered onto, potentially with scaling and rotation) based on the size of the view
+ * containing the display. Returns the computed display size.
+ */
+ public abstract Size computeDisplaySizeFromViewSize(Size viewSize);
+
+ public void setOnCameraStartedListener(@Nullable OnCameraStartedListener listener) {
+ onCameraStartedListener = listener;
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraXPreviewHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraXPreviewHelper.java
new file mode 100644
index 000000000..10bf3d1fc
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/CameraXPreviewHelper.java
@@ -0,0 +1,102 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.components;
+
+import android.app.Activity;
+import androidx.lifecycle.LifecycleOwner;
+import android.graphics.SurfaceTexture;
+import android.util.Log;
+import android.util.Size;
+import androidx.camera.core.CameraX;
+import androidx.camera.core.CameraX.LensFacing;
+import androidx.camera.core.Preview;
+import androidx.camera.core.PreviewConfig;
+
+/**
+ * Uses CameraX APIs for camera setup and access.
+ *
+ *
+ * <p>{@link CameraX} connects to the camera and provides video frames.
+ */
+public class CameraXPreviewHelper extends CameraHelper {
+ private static final String TAG = "CameraXPreviewHelper";
+
+ private Preview preview;
+
+ // Size of the camera-preview frames from the camera.
+ private Size frameSize;
+ // Rotation of the camera-preview frames in degrees.
+ private int frameRotation;
+
+ @Override
+ @SuppressWarnings("RestrictTo") // See b/132705545.
+ public void startCamera(
+ Activity context, CameraFacing cameraFacing, SurfaceTexture surfaceTexture) {
+ LensFacing cameraLensFacing =
+ cameraFacing == CameraHelper.CameraFacing.FRONT ? LensFacing.FRONT : LensFacing.BACK;
+ PreviewConfig previewConfig =
+ new PreviewConfig.Builder().setLensFacing(cameraLensFacing).build();
+ preview = new Preview(previewConfig);
+
+ preview.setOnPreviewOutputUpdateListener(
+ previewOutput -> {
+ if (!previewOutput.getTextureSize().equals(frameSize)) {
+ frameSize = previewOutput.getTextureSize();
+ frameRotation = previewOutput.getRotationDegrees();
+ if (frameSize.getWidth() == 0 || frameSize.getHeight() == 0) {
+ // Invalid frame size. Wait for valid input dimensions before updating display size.
+ Log.d(TAG, "Invalid frameSize.");
+ return;
+ }
+ }
+ if (onCameraStartedListener != null) {
+ onCameraStartedListener.onCameraStarted(previewOutput.getSurfaceTexture());
+ }
+ });
+ CameraX.bindToLifecycle(/*lifecycleOwner=*/ (LifecycleOwner) context, preview);
+ }
+
+ @Override
+ public Size computeDisplaySizeFromViewSize(Size viewSize) {
+ if (viewSize == null || frameSize == null) {
+ // Wait for all inputs before setting display size.
+ Log.d(TAG, "viewSize or frameSize is null.");
+ return null;
+ }
+
+ // Valid rotation values are 0, 90, 180 and 270.
+ // Frames are rotated relative to the device's "natural" landscape orientation. When in portrait
+ // mode, valid rotation values are 90 or 270, and the width/height should be swapped to
+ // calculate aspect ratio.
+ float frameAspectRatio =
+ frameRotation == 90 || frameRotation == 270
+ ? frameSize.getHeight() / (float) frameSize.getWidth()
+ : frameSize.getWidth() / (float) frameSize.getHeight();
+
+ float viewAspectRatio = viewSize.getWidth() / (float) viewSize.getHeight();
+
+ // Match shortest sides together.
+ int scaledWidth;
+ int scaledHeight;
+ if (frameAspectRatio < viewAspectRatio) {
+ scaledWidth = viewSize.getWidth();
+ scaledHeight = Math.round(viewSize.getWidth() / frameAspectRatio);
+ } else {
+ scaledHeight = viewSize.getHeight();
+ scaledWidth = Math.round(viewSize.getHeight() * frameAspectRatio);
+ }
+
+ return new Size(scaledWidth, scaledHeight);
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/ExternalTextureConverter.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/ExternalTextureConverter.java
new file mode 100644
index 000000000..122f598ea
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/ExternalTextureConverter.java
@@ -0,0 +1,373 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.components;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.util.Log;
+import com.google.mediapipe.framework.AppTextureFrame;
+import com.google.mediapipe.glutil.ExternalTextureRenderer;
+import com.google.mediapipe.glutil.GlThread;
+import com.google.mediapipe.glutil.ShaderUtil;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import javax.microedition.khronos.egl.EGLContext;
+
+/**
+ * Textures from {@link SurfaceTexture} are only supposed to be bound to target {@link
+ * GLES11Ext#GL_TEXTURE_EXTERNAL_OES}, which is accessed using samplerExternalOES in the shader.
+ * This means they cannot be used with a regular shader that expects a sampler2D. This class creates
+ * a copy of the texture that can be used with {@link GLES20#GL_TEXTURE_2D} and sampler2D.
+ */
+public class ExternalTextureConverter implements TextureFrameProducer {
+ private static final String TAG = "ExternalTextureConv"; // Max length of a tag is 23.
+ private static final int DEFAULT_NUM_BUFFERS = 2; // Number of output frames allocated.
+ private static final String THREAD_NAME = "ExternalTextureConverter";
+
+ private RenderThread thread;
+
+ /**
+ * Creates the ExternalTextureConverter to create a working copy of each camera frame.
+ *
+ * @param numBuffers the number of camera frames that can enter processing simultaneously.
+ */
+ public ExternalTextureConverter(EGLContext parentContext, int numBuffers) {
+ thread = new RenderThread(parentContext, numBuffers);
+ thread.setName(THREAD_NAME);
+ thread.start();
+ try {
+ thread.waitUntilReady();
+ } catch (InterruptedException ie) {
+ // Someone interrupted our thread. This is not supposed to happen: we own
+ // the thread, and we are not going to interrupt it. Therefore, it is not
+ // reasonable for this constructor to throw an InterruptedException
+ // (which is a checked exception). If it should somehow happen that the
+ // thread is interrupted, let's set the interrupted flag again, log the
+ // error, and throw a RuntimeException.
+ Thread.currentThread().interrupt();
+ Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage());
+ throw new RuntimeException(ie);
+ }
+ }
+
+ /**
+ * Sets vertical flipping of the texture, useful for conversion between coordinate systems with
+ * top-left v.s. bottom-left origins. This should be called before {@link
+ * #setSurfaceTexture(SurfaceTexture, int, int)} or {@link
+ * #setSurfaceTextureAndAttachToGLContext(SurfaceTexture, int, int)}.
+ */
+ public void setFlipY(boolean flip) {
+ thread.setFlipY(flip);
+ }
+
+ public ExternalTextureConverter(EGLContext parentContext) {
+ this(parentContext, DEFAULT_NUM_BUFFERS);
+ }
+
+ public ExternalTextureConverter(
+ EGLContext parentContext, SurfaceTexture texture, int targetWidth, int targetHeight) {
+ this(parentContext);
+ thread.setSurfaceTexture(texture, targetWidth, targetHeight);
+ }
+
+ /**
+ * Sets the input surface texture.
+ *
+   *
+   * <p>The provided width and height will be the size of the converted texture, so if the input
+ * surface texture is rotated (as expressed by its transformation matrix) the provided width and
+ * height should be swapped.
+ */
+ // TODO: Clean up setSurfaceTexture methods.
+ public void setSurfaceTexture(SurfaceTexture texture, int width, int height) {
+ if (texture != null && (width == 0 || height == 0)) {
+ throw new RuntimeException(
+ "ExternalTextureConverter: setSurfaceTexture dimensions cannot be zero");
+ }
+ thread.getHandler().post(() -> thread.setSurfaceTexture(texture, width, height));
+ }
+
+ // TODO: Clean up setSurfaceTexture methods.
+ public void setSurfaceTextureAndAttachToGLContext(SurfaceTexture texture, int width, int height) {
+ if (texture != null && (width == 0 || height == 0)) {
+ throw new RuntimeException(
+ "ExternalTextureConverter: setSurfaceTexture dimensions cannot be zero");
+ }
+ thread
+ .getHandler()
+ .post(() -> thread.setSurfaceTextureAndAttachToGLContext(texture, width, height));
+ }
+
+ @Override
+ public void setConsumer(TextureFrameConsumer next) {
+ thread.setConsumer(next);
+ }
+
+ public void addConsumer(TextureFrameConsumer consumer) {
+ thread.addConsumer(consumer);
+ }
+
+ public void removeConsumer(TextureFrameConsumer consumer) {
+ thread.removeConsumer(consumer);
+ }
+
+ public void close() {
+ if (thread == null) {
+ return;
+ }
+ thread.getHandler().post(() -> thread.setSurfaceTexture(null, 0, 0));
+ thread.quitSafely();
+ try {
+ thread.join();
+ } catch (InterruptedException ie) {
+ // Set the interrupted flag again, log the error, and throw a RuntimeException.
+ Thread.currentThread().interrupt();
+ Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage());
+ throw new RuntimeException(ie);
+ }
+ }
+
+ private static class RenderThread extends GlThread
+ implements SurfaceTexture.OnFrameAvailableListener {
+ private static final long NANOS_PER_MICRO = 1000; // Nanoseconds in one microsecond.
+ private volatile SurfaceTexture surfaceTexture = null;
+ private final List consumers;
+ private List outputFrames = null;
+ private int outputFrameIndex = -1;
+ private ExternalTextureRenderer renderer = null;
+ private long timestampOffset = 0;
+ private long previousTimestamp = 0;
+
+ protected int destinationWidth = 0;
+ protected int destinationHeight = 0;
+
+ public RenderThread(EGLContext parentContext, int numBuffers) {
+ super(parentContext);
+ outputFrames = new ArrayList<>();
+ outputFrames.addAll(Collections.nCopies(numBuffers, null));
+ renderer = new ExternalTextureRenderer();
+ consumers = new ArrayList<>();
+ }
+
+ public void setFlipY(boolean flip) {
+ renderer.setFlipY(flip);
+ }
+
+ public void setSurfaceTexture(SurfaceTexture texture, int width, int height) {
+ if (surfaceTexture != null) {
+ surfaceTexture.setOnFrameAvailableListener(null);
+ }
+ surfaceTexture = texture;
+ if (surfaceTexture != null) {
+ surfaceTexture.setOnFrameAvailableListener(this);
+ }
+ destinationWidth = width;
+ destinationHeight = height;
+ }
+
+ public void setSurfaceTextureAndAttachToGLContext(
+ SurfaceTexture texture, int width, int height) {
+ setSurfaceTexture(texture, width, height);
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ surfaceTexture.attachToGLContext(textures[0]);
+ }
+
+ public void setConsumer(TextureFrameConsumer consumer) {
+ synchronized (consumers) {
+ consumers.clear();
+ consumers.add(consumer);
+ }
+ }
+
+ public void addConsumer(TextureFrameConsumer consumer) {
+ synchronized (consumers) {
+ consumers.add(consumer);
+ }
+ }
+
+ public void removeConsumer(TextureFrameConsumer consumer) {
+ synchronized (consumers) {
+ consumers.remove(consumer);
+ }
+ }
+
+ @Override
+ public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+ handler.post(() -> renderNext(surfaceTexture));
+ }
+
+ @Override
+ public void prepareGl() {
+ super.prepareGl();
+
+ GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+
+ renderer.setup();
+ }
+
+ @Override
+ public void releaseGl() {
+ for (int i = 0; i < outputFrames.size(); ++i) {
+ teardownDestination(i);
+ }
+ renderer.release();
+ super.releaseGl(); // This releases the EGL context, so must do it after any GL calls.
+ }
+
+ protected void renderNext(SurfaceTexture fromTexture) {
+ if (fromTexture != surfaceTexture) {
+ // Although the setSurfaceTexture and renderNext methods are correctly sequentialized on
+ // the same thread, the onFrameAvailable callback is not. Therefore, it is possible for
+ // onFrameAvailable to queue up a renderNext call while a setSurfaceTexture call is still
+ // pending on the handler. When that happens, we should simply disregard the call.
+ return;
+ }
+ try {
+ synchronized (consumers) {
+ boolean frameUpdated = false;
+ for (TextureFrameConsumer consumer : consumers) {
+ AppTextureFrame outputFrame = nextOutputFrame();
+ // TODO: Switch to ref-counted single copy instead of making additional
+ // copies blitting to separate textures each time.
+ updateOutputFrame(outputFrame);
+ frameUpdated = true;
+
+ if (consumer != null) {
+ if (Log.isLoggable(TAG, Log.VERBOSE)) {
+ Log.v(
+ TAG,
+ String.format(
+ "Locking tex: %d width: %d height: %d",
+ outputFrame.getTextureName(),
+ outputFrame.getWidth(),
+ outputFrame.getHeight()));
+ }
+ outputFrame.setInUse();
+ consumer.onNewFrame(outputFrame);
+ }
+ }
+ if (!frameUpdated) { // Need to update the frame even if there are no consumers.
+ AppTextureFrame outputFrame = nextOutputFrame();
+ // TODO: Switch to ref-counted single copy instead of making additional
+ // copies blitting to separate textures each time.
+ updateOutputFrame(outputFrame);
+ }
+ }
+ } finally {
+ }
+ }
+
+ private void teardownDestination(int index) {
+ if (outputFrames.get(index) != null) {
+ waitUntilReleased(outputFrames.get(index));
+ GLES20.glDeleteTextures(1, new int[] {outputFrames.get(index).getTextureName()}, 0);
+ outputFrames.set(index, null);
+ }
+ }
+
+ private void setupDestination(int index) {
+ teardownDestination(index);
+ int destinationTextureId = ShaderUtil.createRgbaTexture(destinationWidth, destinationHeight);
+ Log.d(
+ TAG,
+ String.format(
+ "Created output texture: %d width: %d height: %d",
+ destinationTextureId, destinationWidth, destinationHeight));
+ bindFramebuffer(destinationTextureId, destinationWidth, destinationHeight);
+ outputFrames.set(
+ index, new AppTextureFrame(destinationTextureId, destinationWidth, destinationHeight));
+ }
+
+
+ /**
+ * Gets next available frame or creates new one if next frame is not initialized
+ * or cannot be used with current surface texture.
+ *
+   * <ul>
+   *   <li>Makes sure frame width and height are same as current surface texture
+   *   </li>
+   *   <li>Makes sure frame is not in use (blocks thread until frame is released)
+   *   </li>
+   * </ul>
+   *
+ * NOTE: must be invoked on GL thread
+ */
+ private AppTextureFrame nextOutputFrame() {
+ outputFrameIndex = (outputFrameIndex + 1) % outputFrames.size();
+ AppTextureFrame outputFrame = outputFrames.get(outputFrameIndex);
+ // Check if the size has changed.
+ if (outputFrame == null
+ || outputFrame.getWidth() != destinationWidth
+ || outputFrame.getHeight() != destinationHeight) {
+ // setupDestination will wait for the frame to be released before reallocating it.
+ setupDestination(outputFrameIndex);
+ outputFrame = outputFrames.get(outputFrameIndex);
+ }
+ waitUntilReleased(outputFrame);
+ return outputFrame;
+ }
+
+ /**
+ * Updates output frame with current pixels of surface texture and corresponding timestamp.
+ *
+ * @param outputFrame {@link AppTextureFrame} to populate.
+ *
+ * NOTE: must be invoked on GL thread
+ */
+ private void updateOutputFrame(AppTextureFrame outputFrame) {
+ // Copy surface texture's pixels to output frame
+ bindFramebuffer(outputFrame.getTextureName(), destinationWidth, destinationHeight);
+ renderer.render(surfaceTexture);
+
+ // Populate frame timestamp with surface texture timestamp after render() as renderer
+ // ensures that surface texture has the up-to-date timestamp. (Also adjust |timestampOffset|
+ // to ensure that timestamps increase monotonically.)
+ long textureTimestamp = surfaceTexture.getTimestamp() / NANOS_PER_MICRO;
+ if (textureTimestamp + timestampOffset <= previousTimestamp) {
+ timestampOffset = previousTimestamp + 1 - textureTimestamp;
+ }
+ outputFrame.setTimestamp(textureTimestamp + timestampOffset);
+ previousTimestamp = outputFrame.getTimestamp();
+ }
+
+ private void waitUntilReleased(AppTextureFrame frame) {
+ try {
+ if (Log.isLoggable(TAG, Log.VERBOSE)) {
+ Log.v(
+ TAG,
+ String.format(
+ "Waiting for tex: %d width: %d height: %d",
+ frame.getTextureName(), frame.getWidth(), frame.getHeight()));
+ }
+ frame.waitUntilReleased();
+ if (Log.isLoggable(TAG, Log.VERBOSE)) {
+ Log.v(
+ TAG,
+ String.format(
+ "Finished waiting for tex: %d width: %d height: %d",
+ frame.getTextureName(), frame.getWidth(), frame.getHeight()));
+ }
+ } catch (InterruptedException ie) {
+ // Someone interrupted our thread. This is not supposed to happen: we own
+ // the thread, and we are not going to interrupt it. If it should somehow
+ // happen that the thread is interrupted, let's set the interrupted flag
+ // again, log the error, and throw a RuntimeException.
+ Thread.currentThread().interrupt();
+ Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage());
+ throw new RuntimeException(ie);
+ }
+ }
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/FrameProcessor.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/FrameProcessor.java
new file mode 100644
index 000000000..c63f0495a
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/FrameProcessor.java
@@ -0,0 +1,303 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.components;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.util.Log;
+import com.google.common.base.Preconditions;
+import com.google.mediapipe.framework.AndroidAssetUtil;
+import com.google.mediapipe.framework.AndroidPacketCreator;
+import com.google.mediapipe.framework.Graph;
+import com.google.mediapipe.framework.GraphService;
+import com.google.mediapipe.framework.MediaPipeException;
+import com.google.mediapipe.framework.Packet;
+import com.google.mediapipe.framework.PacketCallback;
+import com.google.mediapipe.framework.PacketGetter;
+import com.google.mediapipe.framework.SurfaceOutput;
+import com.google.mediapipe.framework.TextureFrame;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+import javax.annotation.Nullable;
+
+/**
+ * A {@link com.google.mediapipe.components.TextureFrameProcessor} that sends video frames through a
+ * MediaPipe graph.
+ */
+public class FrameProcessor implements TextureFrameProcessor {
+ private static final String TAG = "FrameProcessor";
+
+  private List<TextureFrameConsumer> consumers = new ArrayList<>();
+ private Graph mediapipeGraph;
+ private AndroidPacketCreator packetCreator;
+ private OnWillAddFrameListener addFrameListener;
+ private String videoInputStream;
+ private String videoInputStreamCpu;
+ private String videoOutputStream;
+ private SurfaceOutput videoSurfaceOutput;
+ private final AtomicBoolean started = new AtomicBoolean(false);
+ private boolean hybridPath = false;
+
+ /**
+ * Constructor.
+ *
+ * @param context an Android {@link Context}.
+ * @param parentNativeContext a native handle to a GL context. The GL context(s) used by the
+ * calculators in the graph will join the parent context's sharegroup, so that textures
+ * generated by the calculators are available in the parent context, and vice versa.
+ * @param graphName the name of the file containing the binary representation of the graph.
+ * @param inputStream the graph input stream that will receive input video frames.
+ * @param outputStream the output stream from which output frames will be produced.
+ */
+ public FrameProcessor(
+ Context context,
+ long parentNativeContext,
+ String graphName,
+ String inputStream,
+ String outputStream) {
+ mediapipeGraph = new Graph();
+ videoInputStream = inputStream;
+ videoOutputStream = outputStream;
+
+ try {
+ if (new File(graphName).isAbsolute()) {
+ mediapipeGraph.loadBinaryGraph(graphName);
+ } else {
+ mediapipeGraph.loadBinaryGraph(
+ AndroidAssetUtil.getAssetBytes(context.getAssets(), graphName));
+ }
+
+ packetCreator = new AndroidPacketCreator(mediapipeGraph);
+ mediapipeGraph.addPacketCallback(
+ videoOutputStream,
+ new PacketCallback() {
+ @Override
+ public void process(Packet packet) {
+              List<TextureFrameConsumer> currentConsumers;
+ synchronized (this) {
+ currentConsumers = consumers;
+ }
+ for (TextureFrameConsumer consumer : currentConsumers) {
+ TextureFrame frame = PacketGetter.getTextureFrame(packet);
+ if (Log.isLoggable(TAG, Log.VERBOSE)) {
+ Log.v(
+ TAG,
+ String.format(
+ "Output tex: %d width: %d height: %d to consumer %h",
+ frame.getTextureName(), frame.getWidth(), frame.getHeight(), consumer));
+ }
+ consumer.onNewFrame(frame);
+ }
+ }
+ });
+
+ mediapipeGraph.setParentGlContext(parentNativeContext);
+ } catch (MediaPipeException e) {
+ Log.e(TAG, "Mediapipe error: ", e);
+ }
+
+ videoSurfaceOutput = mediapipeGraph.addSurfaceOutput(videoOutputStream);
+ }
+
+ /**
+ * Interface to be used so that this class can receive a callback when onNewFrame has determined
+ * it will process an input frame. Can be used to feed packets to accessory streams.
+ */
+ public interface OnWillAddFrameListener {
+ void onWillAddFrame(long timestamp);
+ }
+
+  public synchronized <T> void setServiceObject(GraphService<T> service, T object) {
+ mediapipeGraph.setServiceObject(service, object);
+ }
+
+  public void setInputSidePackets(Map<String, Packet> inputSidePackets) {
+ Preconditions.checkState(
+ !started.get(), "setInputSidePackets must be called before the graph is started");
+ mediapipeGraph.setInputSidePackets(inputSidePackets);
+ }
+
+ @Override
+ public void setConsumer(TextureFrameConsumer listener) {
+ synchronized (this) {
+ consumers = Arrays.asList(listener);
+ }
+ }
+
+ public void setVideoInputStreamCpu(String inputStream) {
+ videoInputStreamCpu = inputStream;
+ }
+
+ public void setHybridPath() {
+ hybridPath = true;
+ }
+
+ public void addConsumer(TextureFrameConsumer listener) {
+ synchronized (this) {
+      List<TextureFrameConsumer> newConsumers = new ArrayList<>(consumers);
+ newConsumers.add(listener);
+ consumers = newConsumers;
+ }
+ }
+
+ public boolean removeConsumer(TextureFrameConsumer listener) {
+ boolean existed;
+ synchronized (this) {
+      List<TextureFrameConsumer> newConsumers = new ArrayList<>(consumers);
+ existed = newConsumers.remove(listener);
+ consumers = newConsumers;
+ }
+ return existed;
+ }
+
+ /** Gets the {@link Graph} used to run the graph. */
+ public Graph getGraph() {
+ return mediapipeGraph;
+ }
+
+ /** Gets the {@link PacketCreator} associated with the graph. */
+ public AndroidPacketCreator getPacketCreator() {
+ return packetCreator;
+ }
+
+ /** Gets the {@link SurfaceOutput} connected to the video output stream. */
+ public SurfaceOutput getVideoSurfaceOutput() {
+ return videoSurfaceOutput;
+ }
+
+ /** Closes and cleans up the graph. */
+ public void close() {
+ if (started.get()) {
+ try {
+ mediapipeGraph.closeAllPacketSources();
+ mediapipeGraph.waitUntilGraphDone();
+ } catch (MediaPipeException e) {
+ Log.e(TAG, "Mediapipe error: ", e);
+ }
+ try {
+ mediapipeGraph.tearDown();
+ } catch (MediaPipeException e) {
+ Log.e(TAG, "Mediapipe error: ", e);
+ }
+ }
+ }
+
+ /**
+ * Initializes the graph in advance of receiving frames.
+ *
+   * <p>
+   * Normally the graph is initialized when the first frame arrives. You can optionally call this
+ * method to initialize it ahead of time.
+ * @throws MediaPipeException for any error status.
+ */
+ public void preheat() {
+ if (!started.getAndSet(true)) {
+ startGraph();
+ }
+ }
+
+ public void setOnWillAddFrameListener(@Nullable OnWillAddFrameListener addFrameListener) {
+ this.addFrameListener = addFrameListener;
+ }
+
+ /**
+ * Returns true if the MediaPipe graph can accept one more input frame.
+ * @throws MediaPipeException for any error status.
+ */
+ private boolean maybeAcceptNewFrame() {
+ if (!started.getAndSet(true)) {
+ startGraph();
+ }
+ return true;
+ }
+
+ @Override
+ public void onNewFrame(final TextureFrame frame) {
+ if (Log.isLoggable(TAG, Log.VERBOSE)) {
+ Log.v(
+ TAG,
+ String.format(
+ "Input tex: %d width: %d height: %d",
+ frame.getTextureName(), frame.getWidth(), frame.getHeight()));
+ }
+
+ if (!maybeAcceptNewFrame()) {
+ frame.release();
+ return;
+ }
+
+ if (addFrameListener != null) {
+ addFrameListener.onWillAddFrame(frame.getTimestamp());
+ }
+
+ Packet imagePacket = packetCreator.createGpuBuffer(frame);
+
+ try {
+ // addConsumablePacketToInputStream allows the graph to take exclusive ownership of the
+ // packet, which may allow for more memory optimizations.
+ mediapipeGraph.addConsumablePacketToInputStream(
+ videoInputStream, imagePacket, frame.getTimestamp());
+ } catch (MediaPipeException e) {
+ Log.e(TAG, "Mediapipe error: ", e);
+ }
+ imagePacket.release();
+ }
+
+ /**
+ * Accepts a Bitmap to be sent to main input stream at the given timestamp.
+ *
+   * <p>
+   * Note: This requires a graph that takes an ImageFrame instead of a mediapipe::GpuBuffer. An
+ * instance of FrameProcessor should only ever use this or the other variant for onNewFrame().
+ */
+ public void onNewFrame(final Bitmap bitmap, long timestamp) {
+ if (!maybeAcceptNewFrame()) {
+ return;
+ }
+
+ if (!hybridPath && addFrameListener != null) {
+ addFrameListener.onWillAddFrame(timestamp);
+ }
+
+ Packet packet = getPacketCreator().createRgbImageFrame(bitmap);
+
+ try {
+ // addConsumablePacketToInputStream allows the graph to take exclusive ownership of the
+ // packet, which may allow for more memory optimizations.
+ mediapipeGraph.addConsumablePacketToInputStream(videoInputStreamCpu, packet, timestamp);
+ } catch (MediaPipeException e) {
+ Log.e(TAG, "Mediapipe error: ", e);
+ }
+ packet.release();
+ }
+
+ public void waitUntilIdle() {
+ try {
+ mediapipeGraph.waitUntilGraphIdle();
+ } catch (MediaPipeException e) {
+ Log.e(TAG, "Mediapipe error: ", e);
+ }
+ }
+
+ /**
+ * Starts running the MediaPipe graph.
+ * @throws MediaPipeException for any error status.
+ */
+ private void startGraph() {
+ mediapipeGraph.startRunningGraph();
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/MicrophoneHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/MicrophoneHelper.java
new file mode 100644
index 000000000..9a4764db2
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/MicrophoneHelper.java
@@ -0,0 +1,295 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.components;
+
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.AudioTimestamp;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build.VERSION;
+import android.os.Build.VERSION_CODES;
+import android.util.Log;
+import javax.annotation.Nullable;
+
+/** Provides access to audio data from a microphone. */
+public class MicrophoneHelper {
+ /** The listener is called when audio data from the microphone is available. */
+ public interface OnAudioDataAvailableListener {
+ public void onAudioDataAvailable(byte[] audioData, long timestampMicros);
+ }
+
+ private static final String TAG = "MicrophoneHelper";
+
+ private static final int AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
+ private static final int AUDIO_SOURCE = AudioSource.MIC;
+
+ // A small constant valued multiplier for setting bufferSize. This is useful
+ // to reduce buffer overflows when a lot of data needs to be read at a high
+ // sample rate from the audio stream. Note that it is desirable to keep this
+ // multiplier small, because very large buffer sizes can slow down blocking
+ // calls to AudioRecord.read(...) when the sample rate is low for instance.
+ private static final int BUFFER_SIZE_MULTIPLIER = 2;
+
+ // A small constant value to decide the number of seconds of audio data that
+ // will be read in a single AudioRecord.read(...) call when
+ // AudioRecord.minBufferSize(...) is unavailable. Smaller values for this
+ // constant favor faster blocking calls to AudioRecord.read(...).
+ private static final int MAX_READ_INTERVAL_SEC = 1;
+
+ // This class uses AudioFormat.ENCODING_PCM_16BIT, i.e. 16 bits per single channel sample.
+ private static final int BYTES_PER_MONO_SAMPLE = 2;
+
+ private static final long UNINITIALIZED_TIMESTAMP = -1;
+ private static final long NANOS_PER_MICROS = 1000;
+ private static final long MICROS_PER_SECOND = 1000000;
+
+ // Number of audio samples recorded per second.
+ private final int sampleRateInHz;
+ // Channel configuration of audio source, one of AudioRecord.CHANNEL_IN_MONO or
+ // AudioRecord.CHANNEL_IN_STEREO.
+ private final int channelConfig;
+ // Data storage allocated to record audio samples in a single function call to AudioRecord.read().
+ private final int bufferSize;
+ // Bytes used per sample, accounts for number of channels of audio source. Possible values are 2
+ // bytes for a 1-channel sample and 4 bytes for a 2-channel sample.
+ private final int bytesPerSample;
+
+ private byte[] audioData;
+
+ // Timestamp provided by the AudioTimestamp object.
+ private AudioTimestamp audioTimestamp;
+ // Initial timestamp base. Can be set by the client so that all timestamps calculated using the
+ // number of samples read per AudioRecord.read() function call start from this timestamp.
+ private long initialTimestamp = UNINITIALIZED_TIMESTAMP;
+ // The total number of samples read from multiple calls to AudioRecord.read(). This is reset to
+ // zero for every startMicrophone() call.
+ private long totalNumSamplesRead;
+
+ // AudioRecord is used to setup a way to record data from the audio source. See
+  // https://developer.android.com/reference/android/media/AudioRecord.html for details.
+ private AudioRecord audioRecord;
+ // Data is read on a separate non-blocking thread.
+ private Thread recordingThread;
+
+ // This flag determines if audio will be read from the audio source and if the data read will be
+ // sent to the listener of this class.
+ private boolean recording = false;
+
+ // This listener is provided with the data read on every AudioRecord.read() call. If the listener
+ // called stopRecording() while a call to AudioRecord.read() was blocked, the class will discard
+ // the data read after recording stopped.
+ private OnAudioDataAvailableListener onAudioDataAvailableListener;
+
+ /**
+   * MicrophoneHelper class constructor. Arguments:
+ *
+ * @param sampleRateInHz Number of samples per second to be read from audio stream.
+ * @param channelConfig Configuration of audio channels. See
+ * https://developer.android.com/reference/android/media/AudioRecord.html#public-constructors_1.
+ */
+ public MicrophoneHelper(int sampleRateInHz, int channelConfig) {
+ this.sampleRateInHz = sampleRateInHz;
+ this.channelConfig = channelConfig;
+
+ // Number of channels of audio source, depending on channelConfig.
+ final int channelCount = channelConfig == AudioFormat.CHANNEL_IN_STEREO ? 2 : 1;
+
+ bytesPerSample = BYTES_PER_MONO_SAMPLE * channelCount;
+
+ // The minimum buffer size required by AudioRecord.
+ final int minBufferSize =
+ AudioRecord.getMinBufferSize(
+ sampleRateInHz, channelConfig, /*audioFormat=*/ AUDIO_ENCODING);
+
+ // Set bufferSize. If the minimum buffer size permitted by the hardware is
+ // unavailable, use the the sampleRateInHz value as the number of bytes.
+ // This is arguably better than another arbitrary constant because a higher
+ // value of sampleRateInHz implies the need for reading large chunks of data
+ // from the audio stream in each AudioRecord.read(...) call.
+ if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
+ Log.e(TAG, "AudioRecord minBufferSize unavailable.");
+ bufferSize = sampleRateInHz * MAX_READ_INTERVAL_SEC * bytesPerSample * BUFFER_SIZE_MULTIPLIER;
+ } else {
+ bufferSize = minBufferSize * BUFFER_SIZE_MULTIPLIER;
+ }
+ }
+
+ private void setupAudioRecord() {
+ audioData = new byte[bufferSize];
+
+ Log.d(TAG, "AudioRecord(" + sampleRateInHz + ", " + bufferSize + ")");
+ audioRecord =
+ new AudioRecord.Builder()
+ .setAudioSource(AUDIO_SOURCE)
+ .setAudioFormat(
+ new AudioFormat.Builder()
+ .setEncoding(AUDIO_ENCODING)
+ .setSampleRate(sampleRateInHz)
+ .setChannelMask(channelConfig)
+ .build())
+ .setBufferSizeInBytes(bufferSize)
+ .build();
+
+ if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
+ audioRecord.release();
+ Log.e(TAG, "AudioRecord could not open.");
+ return;
+ }
+
+ recordingThread =
+ new Thread(
+ () -> {
+ android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
+ Log.v(TAG, "Running audio recording thread.");
+
+ // Initial timestamp in case the AudioRecord.getTimestamp() function is unavailable.
+ long startTimestamp = initialTimestamp != UNINITIALIZED_TIMESTAMP
+ ? initialTimestamp
+ : System.nanoTime() / NANOS_PER_MICROS;
+ long sampleBasedTimestamp;
+ while (recording) {
+ if (audioRecord == null) {
+ break;
+ }
+ final int numBytesRead =
+ audioRecord.read(audioData, /*offsetInBytes=*/ 0, /*sizeInBytes=*/ bufferSize);
+ // If AudioRecord.getTimestamp() is unavailable, calculate the timestamp using the
+ // number of samples read in the call to AudioRecord.read().
+ long sampleBasedFallbackTimestamp =
+ startTimestamp + totalNumSamplesRead * MICROS_PER_SECOND / sampleRateInHz;
+ sampleBasedTimestamp =
+ getTimestamp(/*fallbackTimestamp=*/sampleBasedFallbackTimestamp);
+ if (numBytesRead <= 0) {
+ if (numBytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
+ Log.e(TAG, "ERROR_INVALID_OPERATION");
+ } else if (numBytesRead == AudioRecord.ERROR_BAD_VALUE) {
+ Log.e(TAG, "ERROR_BAD_VALUE");
+ }
+ continue;
+ }
+ Log.v(TAG, "Read " + numBytesRead + " bytes of audio data.");
+
+ // Confirm that the listener is still interested in receiving audio data and
+ // stopMicrophone() wasn't called. If the listener called stopMicrophone(), discard
+ // the data read in the latest AudioRecord.read(...) function call.
+ if (recording) {
+ onAudioDataAvailableListener.onAudioDataAvailable(
+ audioData.clone(), sampleBasedTimestamp);
+ }
+
+ // TODO: Replace byte[] with short[] audioData.
+ // It is expected that audioRecord.read() will read full samples and therefore
+ // numBytesRead is expected to be a multiple of bytesPerSample.
+ int numSamplesRead = numBytesRead / bytesPerSample;
+ totalNumSamplesRead += numSamplesRead;
+ }
+ });
+ }
+
+ // If AudioRecord.getTimestamp() is available and returns without error, this function returns the
+ // timestamp using AudioRecord.getTimestamp(). If the function is unavailable, it returns a
+ // fallbackTimestamp provided as an argument to this method.
+ private long getTimestamp(long fallbackTimestamp) {
+ // AudioRecord.getTimestamp is only available at API Level 24 and above.
+ // https://developer.android.com/reference/android/media/AudioRecord.html#getTimestamp(android.media.AudioTimestamp,%20int).
+ if (VERSION.SDK_INT >= VERSION_CODES.N) {
+ if (audioTimestamp == null) {
+ audioTimestamp = new AudioTimestamp();
+ }
+ int status = audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
+ if (status == AudioRecord.SUCCESS) {
+ return audioTimestamp.nanoTime / NANOS_PER_MICROS;
+ } else {
+ Log.e(TAG, "audioRecord.getTimestamp failed with status: " + status);
+ }
+ }
+ return fallbackTimestamp;
+ }
+
+ // Returns the buffer size read by this class per AudioRecord.read() call.
+ public int getBufferSize() {
+ return bufferSize;
+ }
+
+ /**
+ * Overrides the use of system time as the source of timestamps for audio packets. Not
+ * recommended. Provided to maintain compatibility with existing usage by CameraRecorder.
+ */
+ public void setInitialTimestamp(long initialTimestamp) {
+ this.initialTimestamp = initialTimestamp;
+ }
+
+ // This method sets up a new AudioRecord object for reading audio data from the microphone. It
+ // can be called multiple times to restart the recording if necessary.
+ public void startMicrophone() {
+ if (recording) {
+ return;
+ }
+
+ setupAudioRecord();
+ audioRecord.startRecording();
+ if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+ Log.e(TAG, "AudioRecord couldn't start recording.");
+ audioRecord.release();
+ return;
+ }
+
+ recording = true;
+ totalNumSamplesRead = 0;
+ recordingThread.start();
+
+ Log.d(TAG, "AudioRecord is recording audio.");
+ }
+
+ // Stops the AudioRecord object from reading data from the microphone and releases it.
+ public void stopMicrophone() {
+ stopMicrophoneWithoutCleanup();
+ cleanup();
+ Log.d(TAG, "AudioRecord stopped recording audio.");
+ }
+
+ // Stops the AudioRecord object from reading data from the microphone.
+ public void stopMicrophoneWithoutCleanup() {
+ if (!recording) {
+ return;
+ }
+
+ recording = false;
+ try {
+ if (recordingThread != null) {
+ recordingThread.join();
+ }
+ } catch (InterruptedException ie) {
+ Log.e(TAG, "Exception: ", ie);
+ }
+
+ audioRecord.stop();
+ if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
+ Log.e(TAG, "AudioRecord.stop() didn't run properly.");
+ }
+ }
+
+ // Releases the AudioRecord object when there is no ongoing recording.
+ public void cleanup() {
+ if (recording) {
+ return;
+ }
+ audioRecord.release();
+ }
+
+ public void setOnAudioDataAvailableListener(@Nullable OnAudioDataAvailableListener listener) {
+ onAudioDataAvailableListener = listener;
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/PermissionHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/PermissionHelper.java
new file mode 100644
index 000000000..976200988
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/PermissionHelper.java
@@ -0,0 +1,93 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.components;
+
+import android.Manifest;
+import android.app.Activity;
+import android.content.pm.PackageManager;
+import androidx.core.app.ActivityCompat;
+import androidx.core.content.ContextCompat;
+import android.util.Log;
+
+/** Manages camera permission request and handling. */
+public class PermissionHelper {
+ private static final String TAG = "PermissionHelper";
+
+ private static final String AUDIO_PERMISSION = Manifest.permission.RECORD_AUDIO;
+
+ private static final String CAMERA_PERMISSION = Manifest.permission.CAMERA;
+
+ private static final int REQUEST_CODE = 0;
+
+ public static boolean permissionsGranted(Activity context, String[] permissions) {
+ for (String permission : permissions) {
+ int permissionStatus = ContextCompat.checkSelfPermission(context, permission);
+ if (permissionStatus != PackageManager.PERMISSION_GRANTED) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public static void checkAndRequestPermissions(Activity context, String[] permissions) {
+ if (!permissionsGranted(context, permissions)) {
+ ActivityCompat.requestPermissions(context, permissions, REQUEST_CODE);
+ }
+ }
+
+ /** Called by context to check if camera permissions have been granted. */
+ public static boolean cameraPermissionsGranted(Activity context) {
+ return permissionsGranted(context, new String[] {CAMERA_PERMISSION});
+ }
+
+ /**
+ * Called by context to check if camera permissions have been granted and if not, request them.
+ */
+ public static void checkAndRequestCameraPermissions(Activity context) {
+ Log.d(TAG, "checkAndRequestCameraPermissions");
+ checkAndRequestPermissions(context, new String[] {CAMERA_PERMISSION});
+ }
+
+ /** Called by context to check if audio permissions have been granted. */
+ public static boolean audioPermissionsGranted(Activity context) {
+ return permissionsGranted(context, new String[] {AUDIO_PERMISSION});
+ }
+
+ /** Called by context to check if audio permissions have been granted and if not, request them. */
+ public static void checkAndRequestAudioPermissions(Activity context) {
+ Log.d(TAG, "checkAndRequestAudioPermissions");
+ checkAndRequestPermissions(context, new String[] {AUDIO_PERMISSION});
+ }
+
+ /** Called by context when permissions request has been completed. */
+ public static void onRequestPermissionsResult(
+ int requestCode, String[] permissions, int[] grantResults) {
+ Log.d(TAG, "onRequestPermissionsResult");
+ if (permissions.length > 0 && grantResults.length != permissions.length) {
+ Log.d(TAG, "Permission denied.");
+ return;
+ }
+ for (int i = 0; i < grantResults.length; ++i) {
+ if (grantResults[i] == PackageManager.PERMISSION_GRANTED) {
+ Log.d(TAG, permissions[i] + " permission granted.");
+ }
+ }
+ // Note: We don't need any special callbacks when permissions are ready because activities
+ // using this helper class can have code in onResume() which is called after the
+ // permissions dialog box closes. The code can be branched depending on if permissions are
+ // available via permissionsGranted(Activity).
+ return;
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/TextureFrameConsumer.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/TextureFrameConsumer.java
new file mode 100644
index 000000000..4c62ebbcb
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/TextureFrameConsumer.java
@@ -0,0 +1,23 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.components;
+
+import com.google.mediapipe.framework.TextureFrame;
+
+/** Lightweight abstraction for an object that can receive video frames. */
+public interface TextureFrameConsumer {
+ /** Called when a new {@link TextureFrame} is available. */
+ public abstract void onNewFrame(TextureFrame frame);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/TextureFrameProcessor.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/TextureFrameProcessor.java
new file mode 100644
index 000000000..65d4a8b55
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/TextureFrameProcessor.java
@@ -0,0 +1,21 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.components;
+
+/**
+ * Lightweight abstraction for an object that can receive video frames, process them, and pass them
+ * on to another object.
+ */
+public interface TextureFrameProcessor extends TextureFrameProducer, TextureFrameConsumer {}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/TextureFrameProducer.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/TextureFrameProducer.java
new file mode 100644
index 000000000..dcc73c749
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/components/TextureFrameProducer.java
@@ -0,0 +1,21 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.components;
+
+/** Lightweight abstraction for an object that can produce video frames. */
+public interface TextureFrameProducer {
+ /** Set the consumer that receives the output from this producer. */
+ void setConsumer(TextureFrameConsumer next);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidAssetUtil.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidAssetUtil.java
new file mode 100644
index 000000000..c3a053250
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidAssetUtil.java
@@ -0,0 +1,60 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import android.content.Context;
+import android.content.res.AssetManager;
+import com.google.common.io.ByteStreams;
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Helper methods for handling Android assets.
+ */
+public final class AndroidAssetUtil {
+ /**
+ * Returns an asset's contents as a byte array. This is meant to be used in combination with
+ * {@link Graph#loadBinaryGraph}.
+ *
+ * @param assetName The name of an asset, same as in {@link AssetManager#open(String)}.
+ */
+ public static byte[] getAssetBytes(AssetManager assets, String assetName) {
+ byte[] assetData;
+ try {
+ InputStream stream = assets.open(assetName);
+ assetData = ByteStreams.toByteArray(stream);
+ stream.close();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ return assetData;
+ }
+
+ /**
+ * Initializes the native asset manager, which is used by native code to access assets directly.
+ *
+ *
+ * <p>Note: When possible, using {@link AssetCache} is preferred for portability, since it does
+ * not require any special handling for Android assets on the native code side.
+ */
+ public static boolean initializeNativeAssetManager(Context androidContext) {
+ return nativeInitializeAssetManager(
+ androidContext, androidContext.getCacheDir().getAbsolutePath());
+ }
+
+ private static native boolean nativeInitializeAssetManager(
+ Context androidContext, String cacheDirPath);
+
+ private AndroidAssetUtil() {}
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketCreator.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketCreator.java
new file mode 100644
index 000000000..5e1a7a135
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketCreator.java
@@ -0,0 +1,60 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import android.graphics.Bitmap;
+
+// TODO: use Preconditions in this file.
+/**
+ * Android-specific subclass of PacketCreator.
+ *
+ *
+ * <p>See {@link PacketCreator} for general information.
+ *
+ *
+ * <p>This class contains methods that are Android-specific. You can (and should) use the base
+ * PacketCreator on Android if you do not need any methods from this class.
+ */
+public class AndroidPacketCreator extends PacketCreator {
+ public AndroidPacketCreator(Graph context) {
+ super(context);
+ }
+
+ /** Creates a 3 channel RGB ImageFrame packet from a {@link Bitmap}. */
+ public Packet createRgbImageFrame(Bitmap bitmap) {
+ if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) {
+ throw new RuntimeException("bitmap must use ARGB_8888 config.");
+ }
+ return Packet.create(nativeCreateRgbImageFrame(mediapipeGraph.getNativeHandle(), bitmap));
+ }
+
+ /** Creates a 4 channel RGBA ImageFrame packet from a {@link Bitmap}. */
+ public Packet createRgbaImageFrame(Bitmap bitmap) {
+ if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) {
+ throw new RuntimeException("bitmap must use ARGB_8888 config.");
+ }
+ return Packet.create(nativeCreateRgbaImageFrame(mediapipeGraph.getNativeHandle(), bitmap));
+ }
+
+ /**
+ * Returns the native handle of a new internal::PacketWithContext object on success. Returns 0 on
+ * failure.
+ */
+ private native long nativeCreateRgbImageFrame(long context, Bitmap bitmap);
+
+ /**
+ * Returns the native handle of a new internal::PacketWithContext object on success. Returns 0 on
+ * failure.
+ */
+ private native long nativeCreateRgbaImageFrame(long context, Bitmap bitmap);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketGetter.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketGetter.java
new file mode 100644
index 000000000..55357300e
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AndroidPacketGetter.java
@@ -0,0 +1,69 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import android.graphics.Bitmap;
+import com.google.common.flogger.FluentLogger;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Android-specific subclass of PacketGetter.
+ *
+ *
+ * <p>See {@link PacketGetter} for general information.
+ *
+ *
+ * <p>This class contains methods that are Android-specific.
+ */
+public final class AndroidPacketGetter {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
+ /** Gets an {@code ARGB_8888} bitmap from an RGB mediapipe image frame packet. */
+ public static Bitmap getBitmapFromRgb(Packet packet) {
+ int width = PacketGetter.getImageWidth(packet);
+ int height = PacketGetter.getImageHeight(packet);
+ ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4);
+ PacketGetter.getRgbaFromRgb(packet, buffer);
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(buffer);
+ return bitmap;
+ }
+
+ /**
+ * Gets an {@code ARGB_8888} bitmap from an RGBA mediapipe image frame packet. Returns null in
+ * case of failure.
+ */
+ public static Bitmap getBitmapFromRgba(Packet packet) {
+ // TODO: unify into a single getBitmap call.
+ // TODO: use NDK Bitmap access instead of copyPixelsToBuffer.
+ int width = PacketGetter.getImageWidth(packet);
+ int height = PacketGetter.getImageHeight(packet);
+ ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4);
+ buffer.order(ByteOrder.nativeOrder());
+ // Note: even though the Android Bitmap config is named ARGB_8888, the data
+ // is stored as RGBA internally.
+ boolean status = PacketGetter.getImageData(packet, buffer);
+ if (!status) {
+ logger.atSevere().log(
+ "Got error from getImageData, returning null Bitmap. Image width %d, height %d",
+ width, height);
+ return null;
+ }
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(buffer);
+ return bitmap;
+ }
+
+ private AndroidPacketGetter() {}
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AppTextureFrame.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AppTextureFrame.java
new file mode 100644
index 000000000..20cb81982
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AppTextureFrame.java
@@ -0,0 +1,157 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/**
+ * A {@link TextureFrame} that represents a texture produced by the application.
+ *
+ *
+ * <p>The {@link #waitUntilReleased()} method can be used to wait for the consumer to be done with
+ * the texture before destroying or overwriting it.
+ *
+ *
+ * <p>With this class, your application is the producer. The consumer can be MediaPipe (if you send
+ * the frame into a MediaPipe graph using {@link PacketCreator#createGpuBuffer(TextureFrame)}) or
+ * your application (if you just hand it to another part of your application without going through
+ * MediaPipe).
+ */
+public class AppTextureFrame implements TextureFrame {
+ private int textureName;
+ private int width;
+ private int height;
+ private long timestamp = Long.MIN_VALUE;
+ private boolean inUse = false;
+ private boolean legacyInUse = false; // This ignores GL context sync.
+ private GlSyncToken releaseSyncToken = null;
+
+ public AppTextureFrame(int textureName, int width, int height) {
+ this.textureName = textureName;
+ this.width = width;
+ this.height = height;
+ }
+
+ public void setTimestamp(long timestamp) {
+ this.timestamp = timestamp;
+ }
+
+ @Override
+ public int getTextureName() {
+ return textureName;
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public long getTimestamp() {
+ return timestamp;
+ }
+
+ /**
+ * Waits until the consumer is done with the texture.
+ * @throws InterruptedException
+ */
+ public void waitUntilReleased() throws InterruptedException {
+ synchronized (this) {
+ while (inUse && releaseSyncToken == null) {
+ wait();
+ }
+ if (releaseSyncToken != null) {
+ releaseSyncToken.waitOnCpu();
+ releaseSyncToken.release();
+ inUse = false;
+ releaseSyncToken = null;
+ }
+ }
+ }
+
+ /**
+ * Returns whether the texture is currently in use.
+ *
+ * @deprecated this ignores cross-context sync. You should use {@link waitUntilReleased} instead,
+ * because cross-context sync cannot be supported efficiently using this API.
+ */
+ @Deprecated
+ public boolean getInUse() {
+ synchronized (this) {
+ return legacyInUse;
+ }
+ }
+
+ /**
+ * Marks the texture as currently in use.
+ *
+ * <p>The producer calls this before handing the texture off to the consumer.
+ */
+ public void setInUse() {
+ synchronized (this) {
+ if (releaseSyncToken != null) {
+ releaseSyncToken.release();
+ releaseSyncToken = null;
+ }
+ inUse = true;
+ legacyInUse = true;
+ }
+ }
+
+ /**
+ * Marks the texture as no longer in use.
+ *
+ * <p>The consumer calls this when it is done using the texture.
+ */
+ @Override
+ public void release() {
+ synchronized (this) {
+ inUse = false;
+ legacyInUse = false;
+ notifyAll();
+ }
+ }
+
+ /**
+ * Called by MediaPipe when the texture has been released.
+ *
+ *
+ * <p>The sync token can be used to ensure that the GPU is done reading from the texture.
+ */
+ @Override
+ public void release(GlSyncToken syncToken) {
+ synchronized (this) {
+ if (releaseSyncToken != null) {
+ releaseSyncToken.release();
+ releaseSyncToken = null;
+ }
+ releaseSyncToken = syncToken;
+ // Note: we deliberately do not set inUse to false here. Clients should call
+ // waitUntilReleased. See deprecation notice on getInUse.
+ legacyInUse = false;
+ notifyAll();
+ }
+ }
+
+ @Override
+ public void finalize() {
+ // Note: we do not normally want to rely on finalize to dispose of native objects. In this
+ // case, however, the object is normally disposed of in the wait method; the finalize method
+ // serves as a fallback in case the application simply drops the object. The token object is
+ // small, so even if its destruction is delayed, it's not a huge problem.
+ if (releaseSyncToken != null) {
+ releaseSyncToken.release();
+ releaseSyncToken = null;
+ }
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AssetCache.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AssetCache.java
new file mode 100644
index 000000000..1702f4d4b
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AssetCache.java
@@ -0,0 +1,206 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import android.content.Context;
+import android.content.pm.PackageManager.NameNotFoundException;
+import android.content.res.AssetManager;
+import androidx.annotation.VisibleForTesting;
+import android.text.TextUtils;
+import com.google.common.base.Preconditions;
+import com.google.common.flogger.FluentLogger;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import javax.annotation.Nullable;
+
+/**
+ * A singleton class to help accessing assets as normal files in native code.
+ *
+ *
+ * <p>This class extracts Android assets as files in a cache directory so that they can be accessed
+ * by code that expects a regular file path.
+ *
+ *
+ * <p>The cache is automatically purged when the versionCode in the app's manifest changes, to avoid
+ * using stale assets. If a versionCode is not specified, the cache is disabled.
+ */
+public class AssetCache {
+
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+ @VisibleForTesting static final String MEDIAPIPE_ASSET_CACHE_DIR = "mediapipe_asset_cache";
+ private static AssetCache assetCache;
+ private int appVersionCode;
+ private AssetCacheDbHelper versionDatabase;
+ private Context context;
+
+ /**
+ * Create {@link AssetCache} with an Android context.
+ *
+ *
+ * <p>Asset manager needs context to access the asset files. {@link Create} can be called in the
+ * main activity.
+ */
+ public static synchronized AssetCache create(Context context) {
+ Preconditions.checkNotNull(context);
+ if (assetCache == null) {
+ assetCache = new AssetCache(context);
+ }
+ return assetCache;
+ }
+
+ /**
+ * Purge the cached assets.
+ *
+ *
+ * <p>This should only be needed in local dev builds that do not update the versionCode in the
+ * app's manifest.
+ */
+ public static synchronized void purgeCache(Context context) {
+ AssetCacheDbHelper dbHelper = new AssetCacheDbHelper(context);
+ dbHelper.invalidateCache(-1);
+ dbHelper.close();
+ }
+
+ /**
+ * Get {@link AssetCache} without context.
+ *
+ *
+ * <p>If not created, {@code null} is returned.
+ */
+ @Nullable
+ public static synchronized AssetCache getAssetCache() {
+ return assetCache;
+ }
+
+ /**
+ * Loads all the assets in a given assets path.
+ * @param assetsPath the assets path from which to load.
+ */
+ public synchronized void loadAllAssets(String assetsPath) {
+ Preconditions.checkNotNull(assetsPath);
+
+ AssetManager assetManager = context.getAssets();
+ String[] assetFiles = null;
+ try {
+ assetFiles = assetManager.list(assetsPath);
+ } catch (IOException e) {
+ logger.atSevere().withCause(e).log("Unable to get files in assets path: %s", assetsPath);
+ }
+ if (assetFiles == null || assetFiles.length == 0) {
+ logger.atWarning().log("No files to load");
+ return;
+ }
+
+ for (String file : assetFiles) {
+ // If a path was specified, prepend it to the filename with "/", otherwise, just
+ // use the file name.
+ String path = TextUtils.isEmpty(assetsPath) ? file : assetsPath + "/" + file;
+ getAbsolutePathFromAsset(path);
+ }
+ }
+
+ /**
+ * Get the absolute path for an asset file.
+ *
+ *
+ * <p>The asset will be unpacked to the application's files directory if not already done.
+ *
+ * @param assetPath path to a file under asset.
+ * @return the absolute file system path to the unpacked asset file.
+ */
+ public synchronized String getAbsolutePathFromAsset(String assetPath) {
+ AssetManager assetManager = context.getAssets();
+ File destinationDir = getDefaultMediaPipeCacheDir();
+ destinationDir.mkdir();
+ File assetFile = new File(assetPath);
+ String assetName = assetFile.getName();
+ File destinationFile = new File(destinationDir.getPath(), assetName);
+ // If app version code is not defined, we don't use cache.
+ if (destinationFile.exists() && appVersionCode != 0
+ && versionDatabase.checkVersion(assetPath, appVersionCode)) {
+ return destinationFile.getAbsolutePath();
+ }
+ InputStream inStream = null;
+ try {
+ inStream = assetManager.open(assetPath);
+ writeStreamToFile(inStream, destinationFile);
+ } catch (IOException ioe) {
+ logger.atSevere().log("Unable to unpack: %s", assetPath);
+ try {
+ if (inStream != null) {
+ inStream.close();
+ }
+ } catch (IOException ioe2) {
+ return null;
+ }
+ return null;
+ }
+ // If app version code is not defined, we don't use cache.
+ if (appVersionCode != 0) {
+ versionDatabase.insertAsset(assetPath, destinationFile.getAbsolutePath(), appVersionCode);
+ }
+ return destinationFile.getAbsolutePath();
+ }
+
+ /**
+ * Return all the file names of the assets that were saved to cache from the application's
+ * resources.
+ */
+ public synchronized String[] getAvailableAssets() {
+ File assetsDir = getDefaultMediaPipeCacheDir();
+ if (assetsDir.exists()) {
+ return assetsDir.list();
+ }
+ return new String[0];
+ }
+
+ /**
+ * Returns the default cache directory used by the AssetCache to store the assets.
+ */
+ public File getDefaultMediaPipeCacheDir() {
+ return new File(context.getCacheDir(), MEDIAPIPE_ASSET_CACHE_DIR);
+ }
+
+ private AssetCache(Context context) {
+ this.context = context;
+ versionDatabase = new AssetCacheDbHelper(context);
+ try {
+ appVersionCode = context.getPackageManager()
+ .getPackageInfo(context.getPackageName(), 0).versionCode;
+ logger.atInfo().log("Current app version code: %d", appVersionCode);
+ } catch (NameNotFoundException e) {
+ throw new RuntimeException("Can't get app version code.", e);
+ }
+ // Remove all the cached items that don't agree with the current app version.
+ versionDatabase.invalidateCache(appVersionCode);
+ }
+
+ private static void writeStreamToFile(InputStream inStream, File destinationFile)
+ throws IOException {
+ final int bufferSize = 1000;
+ FileOutputStream outStream = null;
+ try {
+ outStream = new FileOutputStream(destinationFile);
+ byte[] buffer = new byte[bufferSize];
+ while (true) {
+ int n = inStream.read(buffer);
+ if (n == -1) {
+ break;
+ }
+ outStream.write(buffer, 0, n);
+ }
+ } finally {
+ if (outStream != null) {
+ outStream.close();
+ }
+ }
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AssetCacheDbHelper.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AssetCacheDbHelper.java
new file mode 100644
index 000000000..55c628088
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/AssetCacheDbHelper.java
@@ -0,0 +1,175 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import android.content.ContentValues;
+import android.content.Context;
+import android.database.Cursor;
+import android.database.sqlite.SQLiteDatabase;
+import android.database.sqlite.SQLiteOpenHelper;
+import android.provider.BaseColumns;
+import com.google.common.flogger.FluentLogger;
+import java.io.File;
+
+/**
+ * Database to keep the cached version of asset valid.
+ */
+public class AssetCacheDbHelper extends SQLiteOpenHelper {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
+ public static final int DATABASE_VERSION = 2;
+ public static final String DATABASE_NAME = "mediapipe.db";
+
+ private static final String INT_TYPE = " INTEGER";
+ private static final String TEXT_TYPE = " TEXT";
+ private static final String TEXT_UNIQUE_TYPE = " TEXT NOT NULL UNIQUE";
+ private static final String COMMA_SEP = ",";
+ private static final String SQL_CREATE_TABLE =
+ "CREATE TABLE " + AssetCacheEntry.TABLE_NAME + " ("
+ + AssetCacheEntry._ID + " INTEGER PRIMARY KEY,"
+ + AssetCacheEntry.COLUMN_NAME_ASSET + TEXT_UNIQUE_TYPE + COMMA_SEP
+ + AssetCacheEntry.COLUMN_NAME_CACHE_PATH + TEXT_TYPE + COMMA_SEP
+ + AssetCacheEntry.COLUMN_NAME_VERSION + INT_TYPE + " )";
+
+ private static final String SQL_DELETE_TABLE =
+ "DROP TABLE IF EXISTS " + AssetCacheEntry.TABLE_NAME;
+
+ /**
+ * The columns in the AssetVersion table.
+ */
+ public abstract static class AssetCacheEntry implements BaseColumns {
+ public static final String TABLE_NAME = "AssetVersion";
+ public static final String COLUMN_NAME_ASSET = "asset";
+ public static final String COLUMN_NAME_CACHE_PATH = "cache_path";
+ public static final String COLUMN_NAME_VERSION = "version";
+ }
+
+ public AssetCacheDbHelper(Context context) {
+ super(context, DATABASE_NAME, null, DATABASE_VERSION);
+ }
+
+ /**
+ * Check if the cached version is current in database.
+ *
+ * @return true if the asset is cached and the app is not upgraded. Otherwise return false.
+ */
+ public boolean checkVersion(String assetPath, int currentAppVersion) {
+ SQLiteDatabase db = getReadableDatabase();
+ String selection = AssetCacheEntry.COLUMN_NAME_ASSET + " = ?";
+ String[] projection = {AssetCacheEntry.COLUMN_NAME_VERSION};
+ String[] selectionArgs = {assetPath};
+
+ Cursor cursor = queryAssetCacheTable(db, projection, selection, selectionArgs);
+
+ if (cursor.getCount() == 0) {
+ return false;
+ }
+
+ cursor.moveToFirst();
+ int cachedVersion = cursor.getInt(
+ cursor.getColumnIndexOrThrow(AssetCacheEntry.COLUMN_NAME_VERSION));
+ cursor.close();
+ return cachedVersion == currentAppVersion;
+ }
+
+ /**
+ * Remove all entries in the version table that don't have the correct version.
+ *
+ *
+ * <p>Invalidates all cached asset contents that don't have the specified version.
+ */
+ public void invalidateCache(int currentAppVersion) {
+ SQLiteDatabase db = getWritableDatabase();
+ String selection = AssetCacheEntry.COLUMN_NAME_VERSION + " != ?";
+ String[] selectionArgs = {Integer.toString(currentAppVersion)};
+ // Remove the cached files.
+ removeCachedFiles(db, selection, selectionArgs);
+ // Remove the rows in the table.
+ db.delete(AssetCacheEntry.TABLE_NAME, selection, selectionArgs);
+ }
+
+ /**
+ * Insert the cached version of the asset into the database.
+ */
+ public void insertAsset(String asset, String cachePath, int appVersion) {
+ SQLiteDatabase db = getWritableDatabase();
+ // Remove the old cached file first if they are different from the new cachePath.
+ String selection = AssetCacheEntry.COLUMN_NAME_ASSET + " = ? and "
+ + AssetCacheEntry.COLUMN_NAME_CACHE_PATH + " != ?";
+ String[] selectionArgs = {asset, cachePath};
+ removeCachedFiles(db, selection, selectionArgs);
+
+ ContentValues values = new ContentValues();
+ values.put(AssetCacheEntry.COLUMN_NAME_ASSET, asset);
+ values.put(AssetCacheEntry.COLUMN_NAME_CACHE_PATH, cachePath);
+ values.put(AssetCacheEntry.COLUMN_NAME_VERSION, appVersion);
+ long newRowId = db.insertWithOnConflict(
+ AssetCacheEntry.TABLE_NAME,
+ null,
+ values,
+ SQLiteDatabase.CONFLICT_REPLACE);
+ // According to documentation, -1 means any error.
+ if (newRowId == -1) {
+ throw new RuntimeException("Can't insert entry into the mediapipe db.");
+ }
+ }
+
+ @Override
+ public void onCreate(SQLiteDatabase db) {
+ db.execSQL(SQL_CREATE_TABLE);
+ }
+
+ @Override
+ public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
+ // Since version 1 doesn't have the path in the table, just upgrade the table.
+ db.execSQL(SQL_DELETE_TABLE);
+ onCreate(db);
+ }
+
+ @Override
+ public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) {
+ onUpgrade(db, oldVersion, newVersion);
+ }
+
+ private Cursor queryAssetCacheTable(
+ SQLiteDatabase db, String[] projection, String selection, String[] selectionArgs) {
+ return db.query(
+ AssetCacheEntry.TABLE_NAME, // The table to query
+ projection, // The columns to return
+ selection, // The columns for the WHERE clause
+ selectionArgs, // The values for the WHERE clause
+ null, // don't group the rows
+ null, // don't filter by row groups
+ null // The sort order
+ );
+ }
+
+ private void removeCachedFiles(SQLiteDatabase db, String selection, String[] selectionArgs) {
+ String[] projection = {AssetCacheEntry.COLUMN_NAME_CACHE_PATH};
+ Cursor cursor = queryAssetCacheTable(db, projection, selection, selectionArgs);
+ if (cursor.moveToFirst()) {
+ do {
+ String cachedPath = cursor.getString(
+ cursor.getColumnIndexOrThrow(AssetCacheEntry.COLUMN_NAME_CACHE_PATH));
+ File file = new File(cachedPath);
+ if (file.exists()) {
+ if (!file.delete()) {
+ logger.atWarning().log("Stale cached file: %s can't be deleted.", cachedPath);
+ }
+ }
+ } while (cursor.moveToNext());
+ }
+ cursor.close();
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/BUILD
new file mode 100644
index 000000000..e6ad76ed9
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/BUILD
@@ -0,0 +1,84 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+licenses(["notice"]) # Apache 2.0
+
+# MediaPipe Android framework.
+
+exports_files(["proguard.pgcfg"])
+
+android_library(
+ name = "android_framework",
+ proguard_specs = [
+ ":proguard.pgcfg",
+ ],
+ visibility = ["//visibility:public"],
+ exports = [
+ ":android_core",
+ ":android_framework_no_mff",
+ ],
+)
+
+# TODO: Rename android_framework_no_mff.
+android_library(
+ name = "android_framework_no_mff",
+ srcs = glob(
+ ["Android*.java"],
+ ) + [
+ "AssetCache.java",
+ "AssetCacheDbHelper.java",
+ "MediaPipeRunner.java",
+ ],
+ proguard_specs = [
+ ":proguard.pgcfg",
+ ],
+ exports = [
+ ":android_core",
+ ],
+ deps = [
+ ":android_core",
+ "//third_party:androidx_annotation",
+ "//third_party:androidx_legacy_support_v4",
+ "@com_google_code_findbugs//jar",
+ "@com_google_common_flogger//jar",
+ "@com_google_common_flogger_system_backend//jar",
+ "@com_google_guava_android//jar",
+ ],
+)
+
+# This is the Android version of "framework".
+# TODO: unify once allowed by bazel.
+# Note: this is not called "android_framework" for historical reasons (that target
+# also includes other libraries).
+android_library(
+ name = "android_core",
+ srcs = glob(
+ ["**/*.java"],
+ exclude = [
+ "Android*",
+ "AssetCache.java",
+ "AssetCacheDbHelper.java",
+ "MediaPipeRunner.java",
+ ],
+ ),
+ deps = [
+ "//mediapipe/framework:calculator_java_proto_lite",
+ "//mediapipe/framework:calculator_profile_java_proto_lite",
+ "//mediapipe/framework/tool:calculator_graph_template_java_proto_lite",
+ "@com_google_code_findbugs//jar",
+ "@com_google_common_flogger//jar",
+ "@com_google_common_flogger_system_backend//jar",
+ "@com_google_guava_android//jar",
+ ],
+)
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Compat.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Compat.java
new file mode 100644
index 000000000..e66c7e3d7
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Compat.java
@@ -0,0 +1,34 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/**
+ * Utilities for compatibility with old versions of Android.
+ */
+public class Compat {
+ /**
+ * Returns the native handle to the current EGL context. Can be used as a
+ * replacement for EGL14.eglGetCurrentContext().getNativeHandle() before
+ * API 17.
+ */
+ public static native long getCurrentNativeEGLContext();
+
+ /**
+ * Returns the native handle to the current EGL surface. Can be used as a
+ * replacement for EGL14.eglGetCurrentSurface().getNativeHandle() before
+ * API 17.
+ */
+ public static native long getCurrentNativeEGLSurface(int readdraw);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/DummyAndroidManifest.xml b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/DummyAndroidManifest.xml
new file mode 100644
index 000000000..5c6267293
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/DummyAndroidManifest.xml
@@ -0,0 +1,2 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.google.mediapipe.framework" />
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GlSyncToken.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GlSyncToken.java
new file mode 100644
index 000000000..d32faaf13
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GlSyncToken.java
@@ -0,0 +1,37 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/**
+ * Represents a synchronization point for OpenGL operations. This can be needed when working with
+ * multiple GL contexts.
+ */
+public interface GlSyncToken {
+ /**
+ * Waits until the GPU has executed all commands up to the sync point. This blocks the CPU, and
+ * ensures the commands are complete from the point of view of all threads and contexts.
+ */
+ void waitOnCpu();
+
+ /**
+ * Ensures that the following commands on the current OpenGL context will not be executed until
+ * the sync point has been reached. This does not block the CPU, and only affects the current
+ * OpenGL context.
+ */
+ void waitOnGpu();
+
+ /** Releases the underlying native object. */
+ void release();
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Graph.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Graph.java
new file mode 100644
index 000000000..8b871cc8c
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Graph.java
@@ -0,0 +1,658 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import com.google.common.base.Preconditions;
+import com.google.common.flogger.FluentLogger;
+import com.google.mediapipe.proto.CalculatorProto.CalculatorGraphConfig;
+import com.google.protobuf.InvalidProtocolBufferException;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+//import com.google.mediapipe.proto.GraphTemplateProto.CalculatorGraphTemplate;
+//import com.google.protobuf.InvalidProtocolBufferException;
+
+/**
+ * MediaPipe-related context.
+ *
+ * <p>Main purpose is to facilitate the memory management for native allocated mediapipe objects.
+ */
+public class Graph {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+ private static final int MAX_BUFFER_SIZE = 20;
+ private long nativeGraphHandle;
+ // Hold the references to callbacks.
+ private final List<PacketCallback> packetCallbacks = new ArrayList<>();
+ private final List<PacketWithHeaderCallback> packetWithHeaderCallbacks = new ArrayList<>();
+ // Side packets used for running the graph.
+ private Map<String, Packet> sidePackets = new HashMap<>();
+ // Stream headers used for running the graph.
+ private Map<String, Packet> streamHeaders = new HashMap<>();
+ // The mode of running used by this context.
+ // Based on the value of this mode, the caller can use {@link waitUntilIdle} to synchronize with
+ // the mediapipe native graph runner.
+ private boolean stepMode = false;
+
+ private boolean startRunningGraphCalled = false;
+ private boolean graphRunning = false;
+
+ /** Helper class for a buffered Packet and its timestamp. */
+ private static class PacketBufferItem {
+ private PacketBufferItem(Packet packet, Long timestamp) {
+ this.packet = packet;
+ this.timestamp = timestamp;
+ }
+
+ final Packet packet;
+ final Long timestamp;
+ }
+
+ private Map<String, ArrayList<PacketBufferItem>> packetBuffers = new HashMap<>();
+
+ // This is used for methods that need to ensure the native context is alive
+ // while still allowing other methods of this class to execute concurrently.
+ // Note: if a method needs to acquire both this lock and the Graph intrinsic monitor,
+ // it must acquire the intrinsic monitor first.
+ private final Object terminationLock = new Object();
+
+ public Graph() {
+ nativeGraphHandle = nativeCreateGraph();
+ }
+
+ public synchronized long getNativeHandle() {
+ return nativeGraphHandle;
+ }
+
+ public synchronized void setStepMode(boolean stepMode) {
+ this.stepMode = stepMode;
+ }
+
+ public synchronized boolean getStepMode() {
+ return stepMode;
+ }
+
+ /**
+ * Loads a binary mediapipe graph using an absolute file path.
+ *
+ * @param path An absolute file path to a mediapipe graph. An absolute file path can be obtained
+ * from asset file using {@link AssetCache}.
+ */
+ public synchronized void loadBinaryGraph(String path) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ nativeLoadBinaryGraph(nativeGraphHandle, path);
+ }
+
+ /** Loads a binary mediapipe graph from a byte array. */
+ public synchronized void loadBinaryGraph(byte[] data) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ nativeLoadBinaryGraphBytes(nativeGraphHandle, data);
+ }
+
+ /** Specifies a CalculatorGraphConfig for a mediapipe graph or subgraph. */
+ public synchronized void loadBinaryGraph(CalculatorGraphConfig config) {
+ loadBinaryGraph(config.toByteArray());
+ }
+
+ /* Disabled: CalculatorGraphTemplate proto support is unavailable (import commented out above).
+ public synchronized void loadBinaryGraphTemplate(CalculatorGraphTemplate template) {
+ nativeLoadBinaryGraphTemplate(nativeGraphHandle, template.toByteArray());
+ }
+ */
+ /** Specifies the CalculatorGraphConfig::type of the top level graph. */
+ public synchronized void setGraphType(String graphType) {
+ nativeSetGraphType(nativeGraphHandle, graphType);
+ }
+
+ /** Specifies options such as template arguments for the graph. */
+ public synchronized void setGraphOptions(CalculatorGraphConfig.Node options) {
+ nativeSetGraphOptions(nativeGraphHandle, options.toByteArray());
+ }
+
+ /**
+ * Returns the canonicalized CalculatorGraphConfig with subgraphs and graph templates expanded.
+ */
+ public synchronized CalculatorGraphConfig getCalculatorGraphConfig() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ byte[] data = nativeGetCalculatorGraphConfig(nativeGraphHandle);
+ if (data != null) {
+ try {
+ return CalculatorGraphConfig.parseFrom(data);
+ } catch (InvalidProtocolBufferException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Adds a {@link PacketCallback} to the context for callback during graph running.
+ *
+ * @param streamName The output stream name in the graph for callback.
+ * @param callback The callback for handling the call when output stream gets a {@link Packet}.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void addPacketCallback(String streamName, PacketCallback callback) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ Preconditions.checkNotNull(streamName);
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
+ packetCallbacks.add(callback);
+ nativeAddPacketCallback(nativeGraphHandle, streamName, callback);
+ }
+
+ /**
+ * Adds a {@link PacketWithHeaderCallback} to the context for callback during graph running.
+ *
+ * @param streamName The output stream name in the graph for callback.
+ * @param callback The callback for handling the call when output stream gets a {@link Packet} and
+ * has a stream header.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void addPacketWithHeaderCallback(
+ String streamName, PacketWithHeaderCallback callback) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ Preconditions.checkNotNull(streamName);
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
+ packetWithHeaderCallbacks.add(callback);
+ nativeAddPacketWithHeaderCallback(nativeGraphHandle, streamName, callback);
+ }
+
+ /**
+ * Adds a {@link SurfaceOutput} for a stream producing GpuBuffers.
+ *
+ * <p>Multiple outputs can be attached to the same stream.
+ *
+ * @param streamName The output stream name in the graph.
+ * @return a new SurfaceOutput.
+ */
+ public synchronized SurfaceOutput addSurfaceOutput(String streamName) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ Preconditions.checkNotNull(streamName);
+ Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
+ // TODO: check if graph is loaded.
+ return new SurfaceOutput(
+ this, Packet.create(nativeAddSurfaceOutput(nativeGraphHandle, streamName)));
+ }
+
+ /**
+ * Sets the input side packets needed for running the graph.
+ *
+ * @param sidePackets MediaPipe input side packet name to {@link Packet} map.
+ */
+ public synchronized void setInputSidePackets(Map<String, Packet> sidePackets) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
+ for (Map.Entry<String, Packet> entry : sidePackets.entrySet()) {
+ this.sidePackets.put(entry.getKey(), entry.getValue().copy());
+ }
+ }
+
+ public synchronized <T> void setServiceObject(GraphService<T> service, T object) {
+ service.installServiceObject(nativeGraphHandle, object);
+ }
+
+ /**
+ * This tells the {@link Graph} before running the graph, we are expecting those headers to be
+ * available first. This function is usually called before the streaming starts.
+ *
+ * <p>Note: Because some MediaPipe calculators need statically available header info before the
+ * graph is running, we need to have this to synchronize the running of graph with the
+ * availability of the header streams.
+ */
+ public synchronized void addStreamNameExpectingHeader(String streamName) {
+ Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
+ streamHeaders.put(streamName, null);
+ }
+
+ /**
+ * Sets the stream header for specific stream if the header is not set.
+ *
+ * <p>If the graph is already waiting to be started, start the graph when all stream headers are set.
+ *
+ * <p>Note: If the streamHeader is already set, this call will not override the previously set
+ * value. To override, call the function below instead.
+ */
+ public synchronized void setStreamHeader(String streamName, Packet streamHeader) {
+ setStreamHeader(streamName, streamHeader, false);
+ }
+
+ /**
+ * Sets the stream header for specific stream.
+ *
+ * <p>If the graph is already waiting to be started, start the graph when all stream headers are set.
+ *
+ * @param override if true, override the previous set header, however, if graph is running, {@link
+ * IllegalArgumentException} will be thrown.
+ */
+ public synchronized void setStreamHeader(
+ String streamName, Packet streamHeader, boolean override) {
+ Packet header = streamHeaders.get(streamName);
+ if (header != null) {
+ if (override) {
+ if (graphRunning) {
+ throw new IllegalArgumentException(
+ "Can't override an existing stream header, after graph started running.");
+ }
+ header.release();
+ } else {
+ // Don't override, so just return since header is set already.
+ return;
+ }
+ }
+ streamHeaders.put(streamName, streamHeader.copy());
+ if (!graphRunning && startRunningGraphCalled && hasAllStreamHeaders()) {
+ startRunningGraph();
+ }
+ }
+
+ /**
+ * Runs the mediapipe graph until it finishes.
+ *
+ * <p>Side packets that are needed by the graph should be set using {@link setInputSidePackets}.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void runGraphUntilClose() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ Preconditions.checkNotNull(sidePackets);
+ String[] streamNames = new String[sidePackets.size()];
+ long[] packets = new long[sidePackets.size()];
+ splitStreamNamePacketMap(sidePackets, streamNames, packets);
+ nativeRunGraphUntilClose(nativeGraphHandle, streamNames, packets);
+ }
+
+ /**
+ * Starts running the MediaPipe graph.
+ *
+ * <p>Returns immediately after starting the scheduler.
+ *
+ * <p>Side packets that are needed by the graph should be set using {@link setInputSidePackets}.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void startRunningGraph() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ startRunningGraphCalled = true;
+ if (!hasAllStreamHeaders()) {
+ // Graph will be run later, once all stream headers are assembled.
+ logger.atInfo().log("MediaPipe graph won't start until all stream headers are available.");
+ return;
+ }
+ // Prepare the side packets.
+ String[] sidePacketNames = new String[sidePackets.size()];
+ long[] sidePacketHandles = new long[sidePackets.size()];
+ splitStreamNamePacketMap(sidePackets, sidePacketNames, sidePacketHandles);
+ // Prepare the Stream headers.
+ String[] streamNamesWithHeader = new String[streamHeaders.size()];
+ long[] streamHeaderHandles = new long[streamHeaders.size()];
+ splitStreamNamePacketMap(streamHeaders, streamNamesWithHeader, streamHeaderHandles);
+ nativeStartRunningGraph(
+ nativeGraphHandle,
+ sidePacketNames,
+ sidePacketHandles,
+ streamNamesWithHeader,
+ streamHeaderHandles);
+ // Packets can be buffered before the actual mediapipe graph starts. Send them in now, if we
+ // started successfully.
+ graphRunning = true;
+ moveBufferedPacketsToInputStream();
+ }
+
+ /**
+ * Sets blocking behavior when adding packets to a graph input stream via {@link
+ * addPacketToInputStream}. If set to true, the method will block until all dependent input
+ * streams fall below the maximum queue size set in the graph config. If false, it will return and
+ * not add a packet if any dependent input stream is full. To add a packet unconditionally, set
+ * the maximum queue size to -1 in the graph config.
+ */
+ public synchronized void setGraphInputStreamBlockingMode(boolean mode) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ Preconditions.checkState(!graphRunning);
+ nativeSetGraphInputStreamBlockingMode(nativeGraphHandle, mode);
+ }
+
+ /**
+ * Adds one packet into a graph input stream based on the graph stream input mode.
+ *
+ * @param streamName the name of the input stream.
+ * @param packet the mediapipe packet.
+ * @param timestamp the timestamp of the packet, although not enforced, the unit is normally
+ * microsecond.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void addPacketToInputStream(
+ String streamName, Packet packet, long timestamp) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ if (!graphRunning) {
+ addPacketToBuffer(streamName, packet.copy(), timestamp);
+ } else {
+ nativeAddPacketToInputStream(
+ nativeGraphHandle, streamName, packet.getNativeHandle(), timestamp);
+ }
+ }
+
+ /**
+ * Adds one packet into a graph input stream based on the graph stream input mode. Also
+ * simultaneously yields ownership over to the graph stream, so additional memory optimizations
+ * are possible. When the function ends normally, the packet will be consumed and should no longer
+ * be referenced. When the function ends with MediaPipeException, the packet will remain
+ * unaffected, so this call may be retried later.
+ *
+ * @param streamName the name of the input stream.
+ * @param packet the mediapipe packet.
+ * @param timestamp the timestamp of the packet, although not enforced, the unit is normally
+ * microsecond.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void addConsumablePacketToInputStream(
+ String streamName, Packet packet, long timestamp) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ if (!graphRunning) {
+ addPacketToBuffer(streamName, packet.copy(), timestamp);
+ // Release current packet to honor move semantics.
+ packet.release();
+ } else {
+
+ // We move the packet here into native, allowing it to take full control.
+ nativeMovePacketToInputStream(
+ nativeGraphHandle, streamName, packet.getNativeHandle(), timestamp);
+ // The Java handle is released now if the packet was successfully moved. Otherwise the Java
+ // handle continues to own the packet contents.
+ packet.release();
+ }
+ }
+
+ /**
+ * Closes the specified input stream.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void closeInputStream(String streamName) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ nativeCloseInputStream(nativeGraphHandle, streamName);
+ }
+
+ /**
+ * Closes all the input streams in the mediapipe graph.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void closeAllInputStreams() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ nativeCloseAllInputStreams(nativeGraphHandle);
+ }
+
+ /**
+ * Closes all the input streams and source calculators in the mediapipe graph.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void closeAllPacketSources() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ nativeCloseAllPacketSources(nativeGraphHandle);
+ }
+
+ /**
+ * Waits until the graph is done processing.
+ *
+ * <p>This should be called after all sources and input streams are closed.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void waitUntilGraphDone() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ nativeWaitUntilGraphDone(nativeGraphHandle);
+ }
+
+ /**
+ * Waits until the graph runner is idle.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void waitUntilGraphIdle() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
+ nativeWaitUntilGraphIdle(nativeGraphHandle);
+ }
+
+ /** Releases the native mediapipe context. */
+ public synchronized void tearDown() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ for (Map.Entry<String, Packet> entry : sidePackets.entrySet()) {
+ entry.getValue().release();
+ }
+ sidePackets.clear();
+ for (Map.Entry<String, Packet> entry : streamHeaders.entrySet()) {
+ if (entry.getValue() != null) {
+ entry.getValue().release();
+ }
+ }
+ streamHeaders.clear();
+ for (Map.Entry<String, ArrayList<PacketBufferItem>> entry : packetBuffers.entrySet()) {
+ for (PacketBufferItem item : entry.getValue()) {
+ item.packet.release();
+ }
+ }
+ packetBuffers.clear();
+ synchronized (terminationLock) {
+ if (nativeGraphHandle != 0) {
+ nativeReleaseGraph(nativeGraphHandle);
+ nativeGraphHandle = 0;
+ }
+ }
+ packetCallbacks.clear();
+ packetWithHeaderCallbacks.clear();
+ }
+
+ /**
+ * Updates the value of a MediaPipe packet that holds a reference to another MediaPipe packet.
+ *
+ * <p>This updates a mutable packet. Useful for the calculator that needs to have an external way
+ * of updating the parameters using input side packets.
+ *
+ * <p>After calling this, the newPacket can be released (calling newPacket.release()), if no
+ * longer need to use it in Java. The {@code referencePacket} already holds the reference.
+ *
+ * @param referencePacket a mediapipe packet that has the value type Packet*.
+ * @param newPacket the new value for the reference packet to hold.
+ */
+ public synchronized void updatePacketReference(Packet referencePacket, Packet newPacket) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ nativeUpdatePacketReference(
+ referencePacket.getNativeHandle(), newPacket.getNativeHandle());
+ }
+
+ /**
+ * Creates a shared GL runner with the specified name so that MediaPipe calculators can use
+ * OpenGL. This runner should be connected to the calculators by specifying an input side packet
+ * in the graph file with the same name.
+ *
+ * @throws MediaPipeException for any error status.
+ * @deprecated Call {@link setParentGlContext} to set up texture sharing between contexts. Apart
+ * from that, GL is set up automatically.
+ */
+ @Deprecated
+ public synchronized void createGlRunner(String name, long javaGlContext) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ Preconditions.checkArgument(name.equals("gpu_shared"));
+ setParentGlContext(javaGlContext);
+ }
+
+ /**
+ * Specifies an external GL context to use as the parent of MediaPipe's GL context. This will
+ * enable the sharing of textures and other objects between the two contexts.
+ *
+ * <p>Cannot be called after the graph has been started.
+ * @throws MediaPipeException for any error status.
+ */
+ public synchronized void setParentGlContext(long javaGlContext) {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ Preconditions.checkState(!graphRunning);
+ nativeSetParentGlContext(nativeGraphHandle, javaGlContext);
+ }
+
+ /**
+ * Cancels the running graph.
+ */
+ public synchronized void cancelGraph() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ nativeCancelGraph(nativeGraphHandle);
+ }
+
+ /** Returns {@link GraphProfiler}. */
+ public GraphProfiler getProfiler() {
+ Preconditions.checkState(
+ nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
+ return new GraphProfiler(nativeGetProfiler(nativeGraphHandle), this);
+ }
+
+ private boolean addPacketToBuffer(String streamName, Packet packet, long timestamp) {
+ if (!packetBuffers.containsKey(streamName)) {
+ packetBuffers.put(streamName, new ArrayList<PacketBufferItem>());
+ }
+ List<PacketBufferItem> buffer = packetBuffers.get(streamName);
+ if (buffer.size() > MAX_BUFFER_SIZE) {
+ for (Map.Entry<String, Packet> entry : streamHeaders.entrySet()) {
+ if (entry.getValue() == null) {
+ logger.atSevere().log("Stream: %s might be missing.", entry.getKey());
+ }
+ }
+ throw new RuntimeException("Graph is not started because of missing streams");
+ }
+ buffer.add(new PacketBufferItem(packet, timestamp));
+ return true;
+ }
+
+ // Any previously-buffered packets should be passed along to our graph. They've already been
+ // copied into our buffers, so it's fine to move them all over to native.
+ private void moveBufferedPacketsToInputStream() {
+ if (!packetBuffers.isEmpty()) {
+ for (Map.Entry<String, ArrayList<PacketBufferItem>> entry : packetBuffers.entrySet()) {
+ for (PacketBufferItem item : entry.getValue()) {
+ try {
+ nativeMovePacketToInputStream(
+ nativeGraphHandle, entry.getKey(), item.packet.getNativeHandle(), item.timestamp);
+ } catch (MediaPipeException e) {
+ logger.atSevere().log(
+ "AddPacket for stream: %s failed: %s.", entry.getKey(), e.getMessage());
+ throw e;
+ }
+ // Need to release successfully moved packets
+ item.packet.release();
+ }
+ }
+ packetBuffers.clear();
+ }
+ }
+
+ private static void splitStreamNamePacketMap(
+ Map<String, Packet> namePacketMap, String[] streamNames, long[] packets) {
+ if (namePacketMap.size() != streamNames.length || namePacketMap.size() != packets.length) {
+ throw new RuntimeException("Input array length doesn't match the map size!");
+ }
+ int i = 0;
+ for (Map.Entry<String, Packet> entry : namePacketMap.entrySet()) {
+ streamNames[i] = entry.getKey();
+ packets[i] = entry.getValue().getNativeHandle();
+ ++i;
+ }
+ }
+
+ private boolean hasAllStreamHeaders() {
+ for (Map.Entry<String, Packet> entry : streamHeaders.entrySet()) {
+ if (entry.getValue() == null) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private native long nativeCreateGraph();
+
+ private native void nativeReleaseGraph(long context);
+
+ private native void nativeAddPacketCallback(
+ long context, String streamName, PacketCallback callback);
+
+ private native void nativeAddPacketWithHeaderCallback(
+ long context, String streamName, PacketWithHeaderCallback callback);
+
+ private native long nativeAddSurfaceOutput(long context, String streamName);
+
+ private native void nativeLoadBinaryGraph(long context, String path);
+
+ private native void nativeLoadBinaryGraphBytes(long context, byte[] data);
+
+ private native void nativeLoadBinaryGraphTemplate(long context, byte[] data);
+
+ private native void nativeSetGraphType(long context, String graphType);
+
+ private native void nativeSetGraphOptions(long context, byte[] data);
+
+ private native byte[] nativeGetCalculatorGraphConfig(long context);
+
+ private native void nativeRunGraphUntilClose(long context, String[] streamNames, long[] packets);
+
+ private native void nativeStartRunningGraph(
+ long context,
+ String[] sidePacketNames,
+ long[] sidePacketHandles,
+ String[] streamNamesWithHeader,
+ long[] streamHeaderHandles);
+
+ private native void nativeAddPacketToInputStream(
+ long context, String streamName, long packet, long timestamp);
+
+ private native void nativeMovePacketToInputStream(
+ long context, String streamName, long packet, long timestamp);
+
+ private native void nativeSetGraphInputStreamBlockingMode(long context, boolean mode);
+
+ private native void nativeCloseInputStream(long context, String streamName);
+
+ private native void nativeCloseAllInputStreams(long context);
+
+ private native void nativeCloseAllPacketSources(long context);
+
+ private native void nativeWaitUntilGraphDone(long context);
+
+ private native void nativeWaitUntilGraphIdle(long context);
+
+ private native void nativeUpdatePacketReference(long referencePacket, long newPacket);
+
+ private native void nativeSetParentGlContext(long context, long javaGlContext);
+
+ private native void nativeCancelGraph(long context);
+
+ private native long nativeGetProfiler(long context);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphGlSyncToken.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphGlSyncToken.java
new file mode 100644
index 000000000..c141a95aa
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphGlSyncToken.java
@@ -0,0 +1,56 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/**
+ * Represents a synchronization point for OpenGL operations. It can be used to wait until the GPU
+ * has reached the specified point in the sequence of commands it is executing. This can be
+ * necessary when working with multiple GL contexts.
+ */
+final class GraphGlSyncToken implements GlSyncToken {
+ private long token;
+
+ @Override
+ public void waitOnCpu() {
+ if (token != 0) {
+ nativeWaitOnCpu(token);
+ }
+ }
+
+ @Override
+ public void waitOnGpu() {
+ if (token != 0) {
+ nativeWaitOnGpu(token);
+ }
+ }
+
+ @Override
+ public void release() {
+ if (token != 0) {
+ nativeRelease(token);
+ token = 0;
+ }
+ }
+
+ GraphGlSyncToken(long token) {
+ this.token = token;
+ }
+
+ private static native void nativeWaitOnCpu(long token);
+
+ private static native void nativeWaitOnGpu(long token);
+
+ private static native void nativeRelease(long token);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphProfiler.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphProfiler.java
new file mode 100644
index 000000000..8d4016eb8
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphProfiler.java
@@ -0,0 +1,97 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import com.google.common.base.Preconditions;
+import com.google.mediapipe.proto.CalculatorProfileProto.CalculatorProfile;
+import com.google.protobuf.InvalidProtocolBufferException;
+import java.util.ArrayList;
+import java.util.List;
+
+/** MediaPipe Profiler Java API. */
+public class GraphProfiler {
+ private final long nativeProfilerHandle;
+ private final Graph mediapipeGraph;
+
+ GraphProfiler(long nativeProfilerHandle, Graph mediapipeGraph) {
+ Preconditions.checkState(
+ nativeProfilerHandle != 0,
+ "Invalid profiler, tearDown() might have been called already.");
+ this.nativeProfilerHandle = nativeProfilerHandle;
+ this.mediapipeGraph = mediapipeGraph;
+ }
+
+ /**
+ * Resets all the calculator profilers in the graph. This only resets the information about
+ * Process() and does NOT affect information for Open() and Close() methods.
+ */
+ public void reset() {
+ synchronized (mediapipeGraph) {
+ checkContext();
+ nativeReset(nativeProfilerHandle);
+ }
+ }
+
+ /** Resumes all the calculator profilers in the graph. No-op if already profiling. */
+ public void resume() {
+ synchronized (mediapipeGraph) {
+ checkContext();
+ nativeResume(nativeProfilerHandle);
+ }
+ }
+
+ /** Pauses all the calculator profilers in the graph. No-op if already paused. */
+ public void pause() {
+ synchronized (mediapipeGraph) {
+ checkContext();
+ nativePause(nativeProfilerHandle);
+ }
+ }
+
+ /**
+ * Collects the runtime profile for Open(), Process(), and Close() of each calculator in the
+ * graph. May be called at any time after the graph has been initialized.
+ */
+ public List<CalculatorProfile> getCalculatorProfiles() {
+ synchronized (mediapipeGraph) {
+ checkContext();
+ byte[][] profileBytes = nativeGetCalculatorProfiles(nativeProfilerHandle);
+ List<CalculatorProfile> profileList = new ArrayList<>();
+ for (byte[] element : profileBytes) {
+ try {
+ CalculatorProfile profile = CalculatorProfile.parseFrom(element);
+ profileList.add(profile);
+ } catch (InvalidProtocolBufferException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ return profileList;
+ }
+ }
+
+ private void checkContext() {
+ Preconditions.checkState(
+ mediapipeGraph.getNativeHandle() != 0,
+ "Invalid context, tearDown() might have been called already.");
+ }
+
+ private native void nativeReset(long profilingContextHandle);
+
+ private native void nativeResume(long profilingContextHandle);
+
+ private native void nativePause(long profilingContextHandle);
+
+ private native byte[][] nativeGetCalculatorProfiles(long profilingContextHandle);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphService.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphService.java
new file mode 100644
index 000000000..2efc0ab9b
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphService.java
@@ -0,0 +1,30 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/**
+ * Implement this interface to wrap a native GraphService.
+ *
+ * <p>T should be the Java class wrapping the native service object.
+ */
+public interface GraphService<T> {
+ /**
+ * Provides the native service object corresponding to the provided Java object. This must be
+ * handled by calling mediapipe::android::GraphServiceHelper::SetServiceObject in native code,
+ * passing the provided context argument. We do it this way to minimize the number of trips
+ * through JNI and maintain more type safety in the native code.
+ */
+ public void installServiceObject(long context, T object);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphTextureFrame.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphTextureFrame.java
new file mode 100644
index 000000000..e289ee74e
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/GraphTextureFrame.java
@@ -0,0 +1,95 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/**
+ * A {@link TextureFrame} that represents a texture produced by MediaPipe.
+ *
+ * <p>The consumer is typically your application, which should therefore call the {@link #release()}
+ * method.
+ */
+public class GraphTextureFrame implements TextureFrame {
+ private long nativeBufferHandle;
+ // We cache these to be able to get them without a JNI call.
+ private int textureName;
+ private int width;
+ private int height;
+ private long timestamp = Long.MIN_VALUE;
+
+ GraphTextureFrame(long nativeHandle, long timestamp) {
+ nativeBufferHandle = nativeHandle;
+ // TODO: use a single JNI call to fill in all info
+ textureName = nativeGetTextureName(nativeBufferHandle);
+ width = nativeGetWidth(nativeBufferHandle);
+ height = nativeGetHeight(nativeBufferHandle);
+ this.timestamp = timestamp;
+ }
+
+ /** Returns the name of the underlying OpenGL texture. */
+ @Override
+ public int getTextureName() {
+ return textureName;
+ }
+
+ /** Returns the width of the underlying OpenGL texture. */
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ /** Returns the height of the underlying OpenGL texture. */
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
+ @Override
+ public long getTimestamp() {
+ return timestamp;
+ }
+
+ /**
+ * Releases a reference to the underlying buffer.
+ *
+ * <p>The consumer calls this when it is done using the texture.
+ */
+ @Override
+ public void release() {
+ if (nativeBufferHandle != 0) {
+ nativeReleaseBuffer(nativeBufferHandle);
+ nativeBufferHandle = 0;
+ }
+ }
+
+ /**
+ * Releases a reference to the underlying buffer.
+ *
+ * <p>This form of the method is called when the consumer is MediaPipe itself. This can occur if a
+ * packet coming out of the graph is sent back into an input stream. Since both the producer and
+ * the consumer use the same context, we do not need to do further synchronization. Note: we do
+ * not currently support GPU sync across multiple graphs. TODO: Application consumers
+ * currently cannot create a GlSyncToken, so they cannot call this method.
+ */
+ @Override
+ public void release(GlSyncToken syncToken) {
+ syncToken.release();
+ release();
+ }
+
+ private native void nativeReleaseBuffer(long nativeHandle);
+ private native int nativeGetTextureName(long nativeHandle);
+ private native int nativeGetWidth(long nativeHandle);
+ private native int nativeGetHeight(long nativeHandle);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeException.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeException.java
new file mode 100644
index 000000000..900585770
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeException.java
@@ -0,0 +1,76 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+// Package java.nio.charset is not yet available in all Android apps.
+import static com.google.common.base.Charsets.UTF_8;
+
+/** This class represents an error reported by the MediaPipe framework. */
+public class MediaPipeException extends RuntimeException {
+ public MediaPipeException(int statusCode, String statusMessage) {
+ super(StatusCode.values()[statusCode].description() + ": " + statusMessage);
+ this.statusCode = StatusCode.values()[statusCode];
+ this.statusMessage = statusMessage;
+ }
+
+ // Package base.Charsets is deprecated by package java.nio.charset is not
+ // yet available in all Android apps.
+ @SuppressWarnings("deprecation")
+ MediaPipeException(int code, byte[] message) {
+ this(code, new String(message, UTF_8));
+ }
+
+ public StatusCode getStatusCode() {
+ return statusCode;
+ }
+
+ public String getStatusMessage() {
+ return statusMessage;
+ }
+
+ /** The 17 canonical status codes. */
+ public enum StatusCode {
+ OK("ok"),
+ CANCELLED("canceled"),
+ UNKNOWN("unknown"),
+ INVALID_ARGUMENT("invalid argument"),
+ DEADLINE_EXCEEDED("deadline exceeded"),
+ NOT_FOUND("not found"),
+ ALREADY_EXISTS("already exists"),
+ PERMISSION_DENIED("permission denied"),
+ RESOURCE_EXHAUSTED("resource exhausted"),
+ FAILED_PRECONDITION("failed precondition"),
+ ABORTED("aborted"),
+ OUT_OF_RANGE("out of range"),
+ UNIMPLEMENTED("unimplemented"),
+ INTERNAL("internal"),
+ UNAVAILABLE("unavailable"),
+ DATA_LOSS("data loss"),
+ UNAUTHENTICATED("unauthenticated");
+
+ StatusCode(String description) {
+ this.description = description;
+ }
+
+ public String description() {
+ return description;
+ }
+
+ private final String description;
+ };
+
+ private final StatusCode statusCode;
+ private final String statusMessage;
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeRunner.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeRunner.java
new file mode 100644
index 000000000..39a527372
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/MediaPipeRunner.java
@@ -0,0 +1,53 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import android.content.Context;
+
+/** {@link MediaPipeRunner} is an abstract class for running MediaPipe graph in Android. */
+public abstract class MediaPipeRunner extends Graph {
+ protected Context context;
+
+ public MediaPipeRunner(Context context) {
+ // Creates a singleton AssetCache.
+ AssetCache.create(context);
+ this.context = context;
+ }
+
+ public void loadBinaryGraphFromAsset(String assetPath) {
+ try {
+ this.loadBinaryGraph(AssetCache.getAssetCache().getAbsolutePathFromAsset(assetPath));
+ } catch (MediaPipeException e) {
+ // TODO: Report this error from MediaPipe.
+ }
+ }
+
+ /**
+ * Starts running the graph.
+ */
+ public abstract void start();
+ /**
+ * Pauses a running graph.
+ */
+ public abstract void pause();
+ /**
+ * Resumes a paused graph.
+ */
+ public abstract void resume();
+ /**
+ * Stops the running graph and releases the resource. Call this in Activity onDestroy callback.
+ */
+ public abstract void release();
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Packet.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Packet.java
new file mode 100644
index 000000000..f34573d0a
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/Packet.java
@@ -0,0 +1,85 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/**
+ * Java wrapper class for a native MediaPipe Packet.
+ *
+ * <p>To interpret the content of the packet, use {@link PacketGetter}. To create content of a
+ * packet, use {@link PacketCreator}. Java Packet should be released manually when no longer needed.
+ *
+ * <p>{@link Packet} can also be managed by {@link Graph}, which automatically releases all the
+ * packets in the context, however, we still need to be careful of the memory, and release them as
+ * soon as not needed.
+ */
+public class Packet {
+ // Points to a native Packet.
+ private long nativePacketHandle;
+
+ /**
+ * Creates a Java packet from a native mediapipe packet handle.
+ *
+ * @return A Packet from a native internal::PacketWithContext handle.
+ */
+ public static Packet create(long nativeHandle) {
+ return new Packet(nativeHandle);
+ }
+
+ /**
+ * @return The native handle of the packet.
+ */
+ public long getNativeHandle() {
+ return nativePacketHandle;
+ }
+
+ /** @return The timestamp of the Packet. */
+ public long getTimestamp() {
+ return nativeGetTimestamp(nativePacketHandle);
+ }
+
+ /**
+ * @return a shared copy of the Packet.
+ * <p>This is essentially increasing the reference count to the data encapsulated in the
+ * native mediapipe packet.
+ */
+ public Packet copy() {
+ return new Packet(nativeCopyPacket(nativePacketHandle));
+ }
+
+ /**
+ * Releases the native allocation of the packet.
+ *
+ * <p>After the Graph for this packet is torn down, calling this will cause unexpected behavior.
+ * Since Graph tearDown will release all native memories of the Packets it holds.
+ */
+ public void release() {
+ if (nativePacketHandle != 0) {
+ nativeReleasePacket(nativePacketHandle);
+ nativePacketHandle = 0;
+ }
+ }
+
+ // Packet is not intended to be constructed directly.
+ private Packet(long handle) {
+ nativePacketHandle = handle;
+ }
+
+ // Releases the native memory.
+ private native void nativeReleasePacket(long packetHandle);
+
+ private native long nativeCopyPacket(long packetHandle);
+
+ private native long nativeGetTimestamp(long packetHandle);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCallback.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCallback.java
new file mode 100644
index 000000000..ac8eef874
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCallback.java
@@ -0,0 +1,20 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/** Interface for MediaPipe callback with packet. */
+public interface PacketCallback {
+ public void process(Packet packet);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCreator.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCreator.java
new file mode 100644
index 000000000..9d97504f4
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketCreator.java
@@ -0,0 +1,308 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import com.google.protobuf.MessageLite;
+import java.nio.ByteBuffer;
+
+// TODO: use Preconditions in this file.
+/**
+ * Creates {@link Packet} in the given {@link Graph}.
+ *
+ * <p>This class provides a set of functions to create basic mediapipe packet types.
+ */
+public class PacketCreator {
+ protected Graph mediapipeGraph;
+
+ public PacketCreator(Graph context) {
+ mediapipeGraph = context;
+ }
+
+ /**
+ * Create a MediaPipe Packet that contains a pointer to another MediaPipe packet.
+ *
+ * <p>This can be used as a way to update the value of a packet. Similar to a mutable packet using
+ * mediapipe::AdoptAsUniquePtr.
+ *
+ * <p>The parameter {@code packet} can be released after this call, since the new packet already
+ * holds a reference to it in the native object.
+ */
+ public Packet createReferencePacket(Packet packet) {
+ return Packet.create(
+ nativeCreateReferencePacket(mediapipeGraph.getNativeHandle(), packet.getNativeHandle()));
+ }
+
+ /**
+ * Creates a 3 channel RGB ImageFrame packet from an RGB buffer.
+ *
+ * <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer. The pixel rows should have
+ * 4-byte alignment.
+ */
+ public Packet createRgbImage(ByteBuffer buffer, int width, int height) {
+ int widthStep = (((width * 3) + 3) / 4) * 4;
+ if (widthStep * height != buffer.capacity()) {
+ throw new RuntimeException("The size of the buffer should be: " + widthStep * height);
+ }
+ return Packet.create(
+ nativeCreateRgbImage(mediapipeGraph.getNativeHandle(), buffer, width, height));
+ }
+
+ /**
+ * Create a MediaPipe audio packet that is used by most of the audio calculators.
+ *
+ * @param data the raw audio data, bytes per sample is 2.
+ * @param numChannels number of channels in the raw data.
+ * @param numSamples number of samples in the data.
+ */
+ public Packet createAudioPacket(byte[] data, int numChannels, int numSamples) {
+ if (numChannels * numSamples * 2 != data.length) {
+ throw new RuntimeException("Data doesn't have the correct size.");
+ }
+ return Packet.create(
+ nativeCreateAudioPacket(mediapipeGraph.getNativeHandle(), data, numChannels, numSamples));
+ }
+
+ /**
+ * Creates a 3 channel RGB ImageFrame packet from an RGBA buffer.
+ *
+ * <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
+ */
+ public Packet createRgbImageFromRgba(ByteBuffer buffer, int width, int height) {
+ if (width * height * 4 != buffer.capacity()) {
+ throw new RuntimeException("The size of the buffer should be: " + width * height * 4);
+ }
+ return Packet.create(
+ nativeCreateRgbImageFromRgba(mediapipeGraph.getNativeHandle(), buffer, width, height));
+ }
+
+ /**
+ * Creates a 1 channel ImageFrame packet from an U8 buffer.
+ *
+ * <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
+ */
+ public Packet createGrayscaleImage(ByteBuffer buffer, int width, int height) {
+ if (width * height != buffer.capacity()) {
+ throw new RuntimeException(
+ "The size of the buffer should be: " + width * height + " but is " + buffer.capacity());
+ }
+ return Packet.create(
+ nativeCreateGrayscaleImage(mediapipeGraph.getNativeHandle(), buffer, width, height));
+ }
+
+ /**
+ * Creates a 4 channel RGBA ImageFrame packet from an RGBA buffer.
+ *
+ * <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
+ */
+ public Packet createRgbaImageFrame(ByteBuffer buffer, int width, int height) {
+ if (buffer.capacity() != width * height * 4) {
+ throw new RuntimeException("buffer doesn't have the correct size.");
+ }
+ return Packet.create(
+ nativeCreateRgbaImageFrame(mediapipeGraph.getNativeHandle(), buffer, width, height));
+ }
+
+ public Packet createInt16(short value) {
+ return Packet.create(nativeCreateInt16(mediapipeGraph.getNativeHandle(), value));
+ }
+
+ public Packet createInt32(int value) {
+ return Packet.create(nativeCreateInt32(mediapipeGraph.getNativeHandle(), value));
+ }
+
+ public Packet createInt64(long value) {
+ return Packet.create(nativeCreateInt64(mediapipeGraph.getNativeHandle(), value));
+ }
+
+ public Packet createFloat32(float value) {
+ return Packet.create(nativeCreateFloat32(mediapipeGraph.getNativeHandle(), value));
+ }
+
+ public Packet createFloat64(double value) {
+ return Packet.create(nativeCreateFloat64(mediapipeGraph.getNativeHandle(), value));
+ }
+
+ public Packet createBool(boolean value) {
+ return Packet.create(nativeCreateBool(mediapipeGraph.getNativeHandle(), value));
+ }
+
+ public Packet createString(String value) {
+ return Packet.create(nativeCreateString(mediapipeGraph.getNativeHandle(), value));
+ }
+
+ public Packet createInt16Vector(short[] data) {
+ throw new UnsupportedOperationException("Not implemented yet");
+ }
+
+ public Packet createInt32Vector(int[] data) {
+ throw new UnsupportedOperationException("Not implemented yet");
+ }
+
+ public Packet createInt64Vector(long[] data) {
+ throw new UnsupportedOperationException("Not implemented yet");
+ }
+
+ public Packet createFloat32Vector(float[] data) {
+ throw new UnsupportedOperationException("Not implemented yet");
+ }
+
+ public Packet createFloat64Vector(double[] data) {
+ throw new UnsupportedOperationException("Not implemented yet");
+ }
+
+ public Packet createInt32Array(int[] data) {
+ return Packet.create(nativeCreateInt32Array(mediapipeGraph.getNativeHandle(), data));
+ }
+
+ public Packet createFloat32Array(float[] data) {
+ return Packet.create(nativeCreateFloat32Array(mediapipeGraph.getNativeHandle(), data));
+ }
+
+ public Packet createByteArray(byte[] data) {
+ return Packet.create(nativeCreateStringFromByteArray(mediapipeGraph.getNativeHandle(), data));
+ }
+
+ /**
+ * Creates a VideoHeader to be used by the calculator that requires it.
+ *
+ * <p>Note: we are not populating frame rate and duration. If the calculator needs those values,
+ * the calculator is not suitable here. Modify the calculator to not require those values to work.
+ */
+ public Packet createVideoHeader(int width, int height) {
+ return Packet.create(nativeCreateVideoHeader(mediapipeGraph.getNativeHandle(), width, height));
+ }
+
+ /**
+ * Creates a mediapipe::TimeSeriesHeader, which is used by many audio related calculators.
+ *
+ * @param numChannels number of audio channels.
+ * @param sampleRate sampling rate in Hertz.
+ */
+ public Packet createTimeSeriesHeader(int numChannels, double sampleRate) {
+ return Packet.create(
+ nativeCreateTimeSeriesHeader(mediapipeGraph.getNativeHandle(), numChannels, sampleRate));
+ }
+
+ public Packet createMatrix(int rows, int cols, float[] data) {
+ return Packet.create(nativeCreateMatrix(mediapipeGraph.getNativeHandle(), rows, cols, data));
+ }
+
+ /** Creates a {@link Packet} containing the serialized proto string. */
+ public Packet createSerializedProto(MessageLite message) {
+ return Packet.create(
+ nativeCreateStringFromByteArray(mediapipeGraph.getNativeHandle(), message.toByteArray()));
+ }
+
+ /** Creates a {@link Packet} containing a {@code CalculatorOptions} proto message. */
+ public Packet createCalculatorOptions(MessageLite message) {
+ return Packet.create(
+ nativeCreateCalculatorOptions(mediapipeGraph.getNativeHandle(), message.toByteArray()));
+ }
+
+ /** Creates a {@link Packet} containing the given camera intrinsics. */
+ public Packet createCameraIntrinsics(
+ float fx, float fy, float cx, float cy, float width, float height) {
+ return Packet.create(
+ nativeCreateCameraIntrinsics(
+ mediapipeGraph.getNativeHandle(), fx, fy, cx, cy, width, height));
+ }
+
+ /**
+ * Creates a mediapipe::GpuBuffer with the specified texture name and dimensions.
+ *
+ * @param name the OpenGL texture name.
+ * @param width the width in pixels.
+ * @param height the height in pixels.
+ * @param releaseCallback a callback to be invoked when the mediapipe::GpuBuffer is released. Can be
+ * null.
+ */
+ public Packet createGpuBuffer(
+ int name, int width, int height, TextureReleaseCallback releaseCallback) {
+ return Packet.create(
+ nativeCreateGpuBuffer(
+ mediapipeGraph.getNativeHandle(), name, width, height, releaseCallback));
+ }
+
+ /**
+ * Creates a mediapipe::GpuBuffer with the specified texture name and dimensions.
+ *
+ * @param name the OpenGL texture name.
+ * @param width the width in pixels.
+ * @param height the height in pixels.
+ * @deprecated use {@link #createGpuBuffer(int,int,int,TextureReleaseCallback)} instead.
+ */
+ @Deprecated
+ public Packet createGpuBuffer(int name, int width, int height) {
+ return Packet.create(
+ nativeCreateGpuBuffer(mediapipeGraph.getNativeHandle(), name, width, height, null));
+ }
+
+ /**
+ * Creates a mediapipe::GpuBuffer with the provided {@link TextureFrame}.
+ *
+ * <p>Note: in order for MediaPipe to be able to access the texture, the application's GL context
+ * must be linked with MediaPipe's. This is ensured by calling {@link
+ * Graph#createGlRunner(String,long)} with the native handle to the application's GL context as
+ * the second argument.
+ */
+ public Packet createGpuBuffer(TextureFrame frame) {
+ return Packet.create(
+ nativeCreateGpuBuffer(
+ mediapipeGraph.getNativeHandle(),
+ frame.getTextureName(),
+ frame.getWidth(),
+ frame.getHeight(),
+ frame));
+ }
+
+ /** Helper callback adaptor to create the Java {@link GlSyncToken}. This is called by JNI code. */
+ private void releaseWithSyncToken(long nativeSyncToken, TextureReleaseCallback releaseCallback) {
+ releaseCallback.release(new GraphGlSyncToken(nativeSyncToken));
+ }
+
+ private native long nativeCreateReferencePacket(long context, long packet);
+ private native long nativeCreateRgbImage(long context, ByteBuffer buffer, int width, int height);
+ private native long nativeCreateAudioPacket(
+ long context, byte[] data, int numChannels, int numSamples);
+ private native long nativeCreateRgbImageFromRgba(
+ long context, ByteBuffer buffer, int width, int height);
+
+ private native long nativeCreateGrayscaleImage(
+ long context, ByteBuffer buffer, int width, int height);
+
+ private native long nativeCreateRgbaImageFrame(
+ long context, ByteBuffer buffer, int width, int height);
+ private native long nativeCreateInt16(long context, short value);
+ private native long nativeCreateInt32(long context, int value);
+ private native long nativeCreateInt64(long context, long value);
+ private native long nativeCreateFloat32(long context, float value);
+ private native long nativeCreateFloat64(long context, double value);
+ private native long nativeCreateBool(long context, boolean value);
+ private native long nativeCreateString(long context, String value);
+ private native long nativeCreateVideoHeader(long context, int width, int height);
+ private native long nativeCreateTimeSeriesHeader(
+ long context, int numChannels, double sampleRate);
+ private native long nativeCreateMatrix(long context, int rows, int cols, float[] data);
+ private native long nativeCreateGpuBuffer(
+ long context, int name, int width, int height, TextureReleaseCallback releaseCallback);
+ private native long nativeCreateInt32Array(long context, int[] data);
+ private native long nativeCreateFloat32Array(long context, float[] data);
+ private native long nativeCreateStringFromByteArray(long context, byte[] data);
+
+ private native long nativeCreateCalculatorOptions(long context, byte[] data);
+
+ private native long nativeCreateCameraIntrinsics(
+ long context, float fx, float fy, float cx, float cy, float width, float height);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketGetter.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketGetter.java
new file mode 100644
index 000000000..a1a05b175
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketGetter.java
@@ -0,0 +1,303 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import com.google.common.flogger.FluentLogger;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Converts the {@link Packet} to java accessible data types.
+ *
+ *
+ * <p>{@link Packet} is a thin java wrapper for the native MediaPipe packet. This class provides the
+ * extendable conversion needed to access the data in the packet.
+ *
+ *
+ * <p>Note that it is still the developer's responsibility to interpret the data correctly.
+ */
+public final class PacketGetter {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
+ /** Helper class for a list of exactly two Packets. */
+ public static class PacketPair {
+ public PacketPair(Packet first, Packet second) {
+ this.first = first;
+ this.second = second;
+ }
+
+ final Packet first;
+ final Packet second;
+ }
+
+ /**
+ * Returns the {@link Packet} that held in the reference packet.
+ *
+ *
+ * <p>Note: release the returned packet after use.
+ */
+ public static Packet getPacketFromReference(final Packet referencePacket) {
+ return Packet.create(nativeGetPacketFromReference(referencePacket.getNativeHandle()));
+ }
+
+ /**
+ * The {@link Packet} contains a pair of packets, return both of them.
+ *
+ *
+ * <p>Note: release the packets in the pair after use.
+ *
+ * @param packet A MediaPipe packet that contains a pair of packets.
+ */
+ public static PacketPair getPairOfPackets(final Packet packet) {
+ long[] handles = nativeGetPairPackets(packet.getNativeHandle());
+ return new PacketPair(Packet.create(handles[0]), Packet.create(handles[1]));
+ }
+
+ /**
+ * Returns a list of packets that are contained in The {@link Packet}.
+ *
+ *
+ * <p>Note: release the packets in the list after use.
+ *
+ * @param packet A MediaPipe packet that contains a vector of packets.
+ */
+ public static List<Packet> getVectorOfPackets(final Packet packet) {
+ long[] handles = nativeGetVectorPackets(packet.getNativeHandle());
+ List<Packet> packets = new ArrayList<>(handles.length);
+ for (long handle : handles) {
+ packets.add(Packet.create(handle));
+ }
+ return packets;
+ }
+
+ public static short getInt16(final Packet packet) {
+ return nativeGetInt16(packet.getNativeHandle());
+ }
+
+ public static int getInt32(final Packet packet) {
+ return nativeGetInt32(packet.getNativeHandle());
+ }
+
+ public static long getInt64(final Packet packet) {
+ return nativeGetInt64(packet.getNativeHandle());
+ }
+
+ public static float getFloat32(final Packet packet) {
+ return nativeGetFloat32(packet.getNativeHandle());
+ }
+
+ public static double getFloat64(final Packet packet) {
+ return nativeGetFloat64(packet.getNativeHandle());
+ }
+
+ public static boolean getBool(final Packet packet) {
+ return nativeGetBool(packet.getNativeHandle());
+ }
+
+ public static String getString(final Packet packet) {
+ return nativeGetString(packet.getNativeHandle());
+ }
+
+ public static byte[] getBytes(final Packet packet) {
+ return nativeGetBytes(packet.getNativeHandle());
+ }
+
+ public static byte[] getProtoBytes(final Packet packet) {
+ return nativeGetProtoBytes(packet.getNativeHandle());
+ }
+
+ public static short[] getInt16Vector(final Packet packet) {
+ return nativeGetInt16Vector(packet.getNativeHandle());
+ }
+
+ public static int[] getInt32Vector(final Packet packet) {
+ return nativeGetInt32Vector(packet.getNativeHandle());
+ }
+
+ public static long[] getInt64Vector(final Packet packet) {
+ return nativeGetInt64Vector(packet.getNativeHandle());
+ }
+
+ public static float[] getFloat32Vector(final Packet packet) {
+ return nativeGetFloat32Vector(packet.getNativeHandle());
+ }
+
+ public static double[] getFloat64Vector(final Packet packet) {
+ return nativeGetFloat64Vector(packet.getNativeHandle());
+ }
+
+ public static int getImageWidth(final Packet packet) {
+ return nativeGetImageWidth(packet.getNativeHandle());
+ }
+
+ public static int getImageHeight(final Packet packet) {
+ return nativeGetImageHeight(packet.getNativeHandle());
+ }
+
+ /**
+ * Returns the native image buffer in ByteBuffer. It assumes the output buffer stores pixels
+ * contiguously. It returns false if this assumption does not hold.
+ *
+ *
+ * <p>Note: this function does not assume the pixel format.
+ *
+ *
+ * <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
+ */
+ public static boolean getImageData(final Packet packet, ByteBuffer buffer) {
+ return nativeGetImageData(packet.getNativeHandle(), buffer);
+ }
+
+ /**
+ * Converts an RGB mediapipe image frame packet to an RGBA Byte buffer.
+ *
+ *
+ * <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
+ */
+ public static boolean getRgbaFromRgb(final Packet packet, ByteBuffer buffer) {
+ return nativeGetRgbaFromRgb(packet.getNativeHandle(), buffer);
+ }
+
+ /**
+ * Converts the audio matrix data back into byte data.
+ *
+ *
+ * <p>The matrix is in column major order.
+ */
+ public static byte[] getAudioByteData(final Packet packet) {
+ return nativeGetAudioData(packet.getNativeHandle());
+ }
+
+ /**
+ * Audio data is in MediaPipe Matrix format.
+ *
+ * @return the number of channels in the data.
+ */
+ public static int getAudioDataNumChannels(final Packet packet) {
+ return nativeGetMatrixRows(packet.getNativeHandle());
+ }
+
+ /**
+ * Audio data is in MediaPipe Matrix format.
+ *
+ * @return the number of samples in the data.
+ */
+ public static int getAudioDataNumSamples(final Packet packet) {
+ return nativeGetMatrixCols(packet.getNativeHandle());
+ }
+
+ /**
+ * In addition to the data packet, mediapipe currently also has a separate audio header: {@code
+ * mediapipe::TimeSeriesHeader}.
+ *
+ * @return the number of channel in the header packet.
+ */
+ public static int getTimeSeriesHeaderNumChannels(final Packet packet) {
+ return nativeGetTimeSeriesHeaderNumChannels(packet.getNativeHandle());
+ }
+
+ /**
+ * In addition to the data packet, mediapipe currently also has a separate audio header: {@code
+ * mediapipe::TimeSeriesHeader}.
+ *
+ * @return the sampling rate in the header packet.
+ */
+ public static double getTimeSeriesHeaderSampleRate(final Packet packet) {
+ return nativeGetTimeSeriesHeaderSampleRate(packet.getNativeHandle());
+ }
+
+ /** Gets the width in video header packet. */
+ public static int getVideoHeaderWidth(final Packet packet) {
+ return nativeGetVideoHeaderWidth(packet.getNativeHandle());
+ }
+
+ /** Gets the height in video header packet. */
+ public static int getVideoHeaderHeight(final Packet packet) {
+ return nativeGetVideoHeaderHeight(packet.getNativeHandle());
+ }
+
+ /**
+ * Returns the float array data of the mediapipe Matrix.
+ *
+ *
+ * <p>Underlying packet stores the matrix as {@code ::mediapipe::Matrix}.
+ */
+ public static float[] getMatrixData(final Packet packet) {
+ return nativeGetMatrixData(packet.getNativeHandle());
+ }
+
+ public static int getMatrixRows(final Packet packet) {
+ return nativeGetMatrixRows(packet.getNativeHandle());
+ }
+
+ public static int getMatrixCols(final Packet packet) {
+ return nativeGetMatrixCols(packet.getNativeHandle());
+ }
+
+ /**
+ * Returns the GL texture name of the mediapipe::GpuBuffer.
+ *
+ * @deprecated use {@link #getTextureFrame} instead.
+ */
+ @Deprecated
+ public static int getGpuBufferName(final Packet packet) {
+ return nativeGetGpuBufferName(packet.getNativeHandle());
+ }
+
+ /**
+ * Returns a {@link GraphTextureFrame} referencing a C++ mediapipe::GpuBuffer.
+ *
+ *
+ * <p>Note: in order for the application to be able to use the texture, its GL context must be
+ * linked with MediaPipe's. This is ensured by calling {@link Graph#createGlRunner(String,long)}
+ * with the native handle to the application's GL context as the second argument.
+ */
+ public static GraphTextureFrame getTextureFrame(final Packet packet) {
+ return new GraphTextureFrame(
+ nativeGetGpuBuffer(packet.getNativeHandle()), packet.getTimestamp());
+ }
+
+ private static native long nativeGetPacketFromReference(long nativePacketHandle);
+ private static native long[] nativeGetPairPackets(long nativePacketHandle);
+ private static native long[] nativeGetVectorPackets(long nativePacketHandle);
+
+ private static native short nativeGetInt16(long nativePacketHandle);
+ private static native int nativeGetInt32(long nativePacketHandle);
+ private static native long nativeGetInt64(long nativePacketHandle);
+ private static native float nativeGetFloat32(long nativePacketHandle);
+ private static native double nativeGetFloat64(long nativePacketHandle);
+ private static native boolean nativeGetBool(long nativePacketHandle);
+ private static native String nativeGetString(long nativePacketHandle);
+ private static native byte[] nativeGetBytes(long nativePacketHandle);
+ private static native byte[] nativeGetProtoBytes(long nativePacketHandle);
+ private static native short[] nativeGetInt16Vector(long nativePacketHandle);
+ private static native int[] nativeGetInt32Vector(long nativePacketHandle);
+ private static native long[] nativeGetInt64Vector(long nativePacketHandle);
+ private static native float[] nativeGetFloat32Vector(long nativePacketHandle);
+ private static native double[] nativeGetFloat64Vector(long nativePacketHandle);
+ private static native int nativeGetImageWidth(long nativePacketHandle);
+ private static native int nativeGetImageHeight(long nativePacketHandle);
+ private static native boolean nativeGetImageData(long nativePacketHandle, ByteBuffer buffer);
+ private static native boolean nativeGetRgbaFromRgb(long nativePacketHandle, ByteBuffer buffer);
+ // Retrieves the values that are in the VideoHeader.
+ private static native int nativeGetVideoHeaderWidth(long nativepackethandle);
+ private static native int nativeGetVideoHeaderHeight(long nativepackethandle);
+ // Retrieves the values that are in the mediapipe::TimeSeriesHeader.
+ private static native int nativeGetTimeSeriesHeaderNumChannels(long nativepackethandle);
+
+ private static native double nativeGetTimeSeriesHeaderSampleRate(long nativepackethandle);
+
+ // Audio data in MediaPipe current uses MediaPipe Matrix format type.
+ private static native byte[] nativeGetAudioData(long nativePacketHandle);
+ // Native helper functions to access the MediaPipe Matrix data.
+ private static native float[] nativeGetMatrixData(long nativePacketHandle);
+
+ private static native int nativeGetMatrixRows(long nativePacketHandle);
+ private static native int nativeGetMatrixCols(long nativePacketHandle);
+ private static native int nativeGetGpuBufferName(long nativePacketHandle);
+ private static native long nativeGetGpuBuffer(long nativePacketHandle);
+
+ private PacketGetter() {}
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketWithHeaderCallback.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketWithHeaderCallback.java
new file mode 100644
index 000000000..9803254f9
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/PacketWithHeaderCallback.java
@@ -0,0 +1,20 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/** Interface for MediaPipe callback with packet and packet header. */
+public interface PacketWithHeaderCallback {
+ public void process(Packet packet, Packet packetHeader);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/SurfaceOutput.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/SurfaceOutput.java
new file mode 100644
index 000000000..454ff2c2c
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/SurfaceOutput.java
@@ -0,0 +1,79 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+import javax.annotation.Nullable;
+
+/**
+ * Outputs a MediaPipe video stream to an {@link android.opengl.EGLSurface}.
+ *
+ *
+ * <p>Should be created using {@link Graph#addEglSurfaceOutput}.
+ */
+public class SurfaceOutput {
+ private Packet surfaceHolderPacket;
+ private Graph mediapipeGraph;
+
+ SurfaceOutput(Graph context, Packet holderPacket) {
+ mediapipeGraph = context;
+ surfaceHolderPacket = holderPacket;
+ }
+
+ /**
+ * Sets vertical flipping of the output surface, useful for conversion between coordinate systems
+ * with top-left v.s. bottom-left origins. This should be called before {@link
+ * #setSurface(Object)} or {@link #setEglSurface(long)}.
+ */
+ public void setFlipY(boolean flip) {
+ nativeSetFlipY(surfaceHolderPacket.getNativeHandle(), flip);
+ }
+
+ /**
+ * Connects an Android {@link Surface} to an output.
+ *
+ *
+ * <p>This creates the requisite {@link EGLSurface} internally. If one has already been created
+ * for this Surface outside of MediaPipe, the call will fail.
+ *
+ *
+ * <p>Note that a given Surface can only be connected to one output. If you wish to move it to a
+ * different output, first call {@code setSurface(null)} on the old output.
+ *
+ * @param surface The surface to connect. Can be {@code null}.
+ */
+ public void setSurface(@Nullable Object surface) {
+ nativeSetSurface(
+ mediapipeGraph.getNativeHandle(), surfaceHolderPacket.getNativeHandle(), surface);
+ }
+
+ /**
+ * Connects an EGL surface to an output.
+ *
+ *
+ * <p>NOTE: The surface needs to be compatible with the GL context used by MediaPipe. In practice
+ * this means the EGL context that created the surface should use the same config as used by the
+ * MediaPipe GL context, otherwise the surface sink calculator will fail with {@code
+ * EGL_BAD_MATCH}.
+ *
+ * @param nativeEglSurface Native handle to the egl surface.
+ */
+ public void setEglSurface(long nativeEglSurface) {
+ nativeSetEglSurface(
+ mediapipeGraph.getNativeHandle(), surfaceHolderPacket.getNativeHandle(), nativeEglSurface);
+ }
+
+ private native void nativeSetFlipY(long nativePacket, boolean flip);
+
+ private native void nativeSetSurface(
+ long nativeContext, long nativePacket, Object surface);
+ private native void nativeSetEglSurface(
+ long nativeContext, long nativePacket, long nativeEglSurface);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureFrame.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureFrame.java
new file mode 100644
index 000000000..babfd2958
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureFrame.java
@@ -0,0 +1,62 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/**
+ * Interface for a video frame that can be accessed as a texture.
+ *
+ *
+ * <p>This interface defines a producer/consumer relationship between the component that originates
+ * the TextureFrame and the component that receives it. The consumer must call {@link
+ * #release()} when it is done using the frame. This gives the producer the opportunity to recycle
+ * the resource.
+ *
+ *
+ * <p>When your application sends a TextureFrame into a MediaPipe graph, the application is the
+ * producer and MediaPipe is the consumer. MediaPipe will call the release() method when all copies
+ * of the packet holding the texture have been destroyed.
+ *
+ *
+ * <p>When MediaPipe sends a TextureFrame to the application, MediaPipe is the producer and the
+ * application is the consumer. The application should call the release() method.
+ *
+ *
+ * <p>You can also send a TextureFrame from a component of your application to another. In this
+ * case, the receiving component is the consumer, and should call release(). This can be useful, for
+ * instance, if your application requires a "raw" mode where frames are sent directly from the video
+ * source to the renderer, bypassing MediaPipe.
+ */
+public interface TextureFrame extends TextureReleaseCallback {
+ /** The OpenGL name of the texture. */
+ int getTextureName();
+
+ /** Width of the frame in pixels. */
+ int getWidth();
+
+ /** Height of the frame in pixels. */
+ int getHeight();
+
+ /** The presentation time of the frame in microseconds **/
+ long getTimestamp();
+
+ /**
+ * The consumer that receives this TextureFrame must call this method to inform the provider that
+ * it is done with it.
+ */
+ void release();
+
+ /**
+ * If this texture is provided to MediaPipe, this method will be called when it is released. The
+ * {@link GlSyncToken} can be used to wait for the GPU to be entirely done reading the texture.
+ */
+ @Override
+ void release(GlSyncToken syncToken);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureReleaseCallback.java b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureReleaseCallback.java
new file mode 100644
index 000000000..e84d89358
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/TextureReleaseCallback.java
@@ -0,0 +1,27 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.framework;
+
+/**
+ * A callback that gets invoked when a texture is no longer in use.
+ */
+public interface TextureReleaseCallback {
+ /**
+ * Called when the texture has been released. The sync token can be used to ensure that the GPU is
+ * done reading from it. Implementations of this interface should release the token once they are
+ * done with it.
+ */
+ void release(GlSyncToken syncToken);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/BUILD b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/BUILD
new file mode 100644
index 000000000..182226cbb
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/BUILD
@@ -0,0 +1,143 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+licenses(["notice"]) # Apache 2.0
+
+package(
+ default_visibility = ["//visibility:public"],
+ features = ["no_layering_check"],
+)
+
+alias(
+ name = "mediapipe_android_framework_jni",
+ actual = ":mediapipe_framework_jni",
+)
+
+cc_library(
+ name = "mediapipe_framework_jni",
+ srcs = [
+ "compat_jni.cc",
+ "graph.cc",
+ "graph_jni.cc",
+ "graph_service_jni.cc",
+ "packet_context_jni.cc",
+ "packet_creator_jni.cc",
+ "packet_getter_jni.cc",
+ "graph_profiler_jni.cc",
+ ] + select({
+ "//conditions:default": [],
+ "//mediapipe:android": [
+ "android_asset_util_jni.cc",
+ "android_packet_creator_jni.cc",
+ ],
+ }) + select({
+ "//conditions:default": [
+ "graph_gl_sync_token.cc",
+ "graph_texture_frame_jni.cc",
+ "surface_output_jni.cc",
+ ],
+ "//mediapipe/gpu:disable_gpu": [],
+ }),
+ hdrs = [
+ "colorspace.h",
+ "compat_jni.h",
+ "graph.h",
+ "graph_jni.h",
+ "graph_service_jni.h",
+ "packet_context_jni.h",
+ "packet_creator_jni.h",
+ "packet_getter_jni.h",
+ "graph_profiler_jni.h",
+ ] + select({
+ "//conditions:default": [],
+ "//mediapipe:android": [
+ "android_asset_util_jni.h",
+ "android_packet_creator_jni.h",
+ ],
+ }) + select({
+ "//conditions:default": [
+ "graph_gl_sync_token.h",
+ "graph_texture_frame_jni.h",
+ "surface_output_jni.h",
+ ],
+ "//mediapipe/gpu:disable_gpu": [],
+ }),
+ linkopts = select({
+ "//conditions:default": [],
+ "//mediapipe:android": [
+ "-ljnigraphics",
+ "-lEGL", # This is needed by compat_jni even if GPU is disabled.
+ ],
+ }),
+ visibility = ["//visibility:public"],
+ deps = [
+ ":jni_util",
+ "//mediapipe/framework:calculator_framework",
+ "//mediapipe/framework:calculator_profile_cc_proto",
+ "//mediapipe/framework/tool:calculator_graph_template_cc_proto",
+ "//mediapipe/framework/formats:image_format_cc_proto",
+ "//mediapipe/framework/formats:matrix_data_cc_proto",
+ "//mediapipe/framework/formats:time_series_header_cc_proto",
+ "@com_google_absl//absl/strings",
+ "@com_google_absl//absl/synchronization",
+ "@eigen_archive//:eigen",
+ "//mediapipe/framework:camera_intrinsics",
+ "//mediapipe/framework/formats:image_frame",
+ "//mediapipe/framework/formats:matrix",
+ "//mediapipe/framework/formats:video_stream_header",
+ "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler",
+ "//mediapipe/framework/tool:name_util",
+ "//mediapipe/framework/tool:executor_util",
+ "//mediapipe/framework/port:core_proto",
+ "//mediapipe/framework/port:logging",
+ "//mediapipe/framework/port:threadpool",
+ "//mediapipe/framework/port:singleton",
+ "//mediapipe/framework/port:status",
+ ] + select({
+ "//conditions:default": [
+ "//mediapipe/framework/port:file_helpers",
+ ],
+ "//mediapipe:android": [
+ "//mediapipe/util/android/file/base",
+ "//mediapipe/util/android:asset_manager_util",
+ ],
+ }) + select({
+ "//conditions:default": [
+ "//mediapipe/gpu:gl_quad_renderer",
+ "//mediapipe/gpu:gl_calculator_helper",
+ "//mediapipe/gpu:gl_surface_sink_calculator",
+ "//mediapipe/gpu:gpu_shared_data_internal",
+ "//mediapipe/gpu:graph_support",
+ ],
+ "//mediapipe/gpu:disable_gpu": [
+ "//mediapipe/gpu:gpu_shared_data_internal",
+ ],
+ }),
+ alwayslink = 1,
+)
+
+cc_library(
+ name = "jni_util",
+ srcs = (["jni_util.cc"]),
+ hdrs = (["jni_util.h"]),
+ deps = [
+ "@com_google_absl//absl/synchronization",
+ "//mediapipe/framework/port:logging",
+ ] + select({
+ "//conditions:default": [
+ ],
+ "//mediapipe:android": [
+ ],
+ }),
+)
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/METADATA b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/METADATA
new file mode 100644
index 000000000..cbc57f510
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/METADATA
@@ -0,0 +1,7 @@
+tricorder: {
+ options: {
+ builder: {
+ config: "android_arm"
+ }
+ }
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.cc
new file mode 100644
index 000000000..71a36b665
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.cc
@@ -0,0 +1,33 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/java/com/google/mediapipe/framework/jni/android_asset_util_jni.h"
+
+#include <jni.h>
+
+#include "mediapipe/framework/port/logging.h"
+#include "mediapipe/framework/port/singleton.h"
+#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
+#include "mediapipe/util/android/asset_manager_util.h"
+
+JNIEXPORT jboolean JNICALL ANDROID_ASSET_UTIL_METHOD(
+ nativeInitializeAssetManager)(JNIEnv* env, jclass clz,
+ jobject android_context,
+ jstring cache_dir_path) {
+ mediapipe::AssetManager* asset_manager =
+ Singleton<mediapipe::AssetManager>::get();
+ return asset_manager->InitializeFromActivity(
+ env, android_context,
+ mediapipe::android::JStringToStdString(env, cache_dir_path));
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.h
new file mode 100644
index 000000000..c842433ff
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_asset_util_jni.h
@@ -0,0 +1,36 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_ASSET_UTIL_JNI_H_
+#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_ASSET_UTIL_JNI_H_
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif // __cplusplus
+
+#define ANDROID_ASSET_UTIL_METHOD(METHOD_NAME) \
+ Java_com_google_mediapipe_framework_AndroidAssetUtil_##METHOD_NAME
+
+JNIEXPORT jboolean JNICALL ANDROID_ASSET_UTIL_METHOD(
+ nativeInitializeAssetManager)(JNIEnv* env, jclass clz,
+ jobject android_context,
+ jstring cache_dir_path);
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_ASSET_UTIL_JNI_H_
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc
new file mode 100644
index 000000000..2f71e649c
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc
@@ -0,0 +1,117 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.h"
+
+#include <android/bitmap.h>
+
+#include <cstring>
+#include <memory>
+
+#include "absl/memory/memory.h"
+#include "mediapipe/framework/formats/image_format.pb.h"
+#include "mediapipe/framework/formats/image_frame.h"
+#include "mediapipe/framework/port/logging.h"
+#include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h"
+#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
+
+namespace {
+
+// Creates a new internal::PacketWithContext object, and returns the native
+// handle.
+int64_t CreatePacketWithContext(jlong context,
+ const mediapipe::Packet& packet) {
+ mediapipe::android::Graph* mediapipe_graph =
+      reinterpret_cast<mediapipe::android::Graph*>(context);
+ return mediapipe_graph->WrapPacketIntoContext(packet);
+}
+
+} // namespace
+
+JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD(
+ nativeCreateRgbImageFrame)(JNIEnv* env, jobject thiz, jlong context,
+ jobject bitmap) {
+ AndroidBitmapInfo info;
+ int result = AndroidBitmap_getInfo(env, bitmap, &info);
+ if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
+ LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " << result;
+ return 0L;
+ }
+ if (info.stride != info.width * 4) {
+ LOG(ERROR) << "Bitmap stride: " << info.stride
+ << "is not equal to 4 times bitmap width: " << info.width;
+ return 0L;
+ }
+ auto image_frame = absl::make_unique<::mediapipe::ImageFrame>(
+ mediapipe::ImageFormat::SRGB, info.width, info.height,
+ ::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary);
+ void* pixel_addr = nullptr;
+ result = AndroidBitmap_lockPixels(env, bitmap, &pixel_addr);
+ if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
+ LOG(ERROR) << "AndroidBitmap_lockPixels() failed with result code "
+ << result;
+ return 0L;
+ }
+  const uint8_t* rgba_data = static_cast<const uint8_t*>(pixel_addr);
+ mediapipe::android::RgbaToRgb(rgba_data, info.stride, info.width, info.height,
+ image_frame->MutablePixelData(),
+ image_frame->WidthStep());
+ result = AndroidBitmap_unlockPixels(env, bitmap);
+ if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
+ LOG(ERROR) << "AndroidBitmap_unlockPixels() failed with result code "
+ << result;
+ return 0L;
+ }
+ mediapipe::Packet packet = mediapipe::Adopt(image_frame.release());
+ return CreatePacketWithContext(context, packet);
+}
+
+JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD(
+ nativeCreateRgbaImageFrame)(JNIEnv* env, jobject thiz, jlong context,
+ jobject bitmap) {
+ AndroidBitmapInfo info;
+ int result = AndroidBitmap_getInfo(env, bitmap, &info);
+ if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
+ LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " << result;
+ return 0L;
+ }
+ auto image_frame = absl::make_unique<::mediapipe::ImageFrame>(
+ mediapipe::ImageFormat::SRGBA, info.width, info.height,
+ ::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary);
+ int64_t buffer_size = info.stride * info.height;
+ if (buffer_size != image_frame->PixelDataSize()) {
+ LOG(ERROR) << "Bitmap stride: " << info.stride
+ << " times bitmap height: " << info.height
+ << " is not equal to the expected size: "
+ << image_frame->PixelDataSize();
+ return 0L;
+ }
+ void* pixel_addr = nullptr;
+ result = AndroidBitmap_lockPixels(env, bitmap, &pixel_addr);
+ if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
+ LOG(ERROR) << "AndroidBitmap_lockPixels() failed with result code "
+ << result;
+ return 0L;
+ }
+ std::memcpy(image_frame->MutablePixelData(), pixel_addr,
+ image_frame->PixelDataSize());
+ result = AndroidBitmap_unlockPixels(env, bitmap);
+ if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
+ LOG(ERROR) << "AndroidBitmap_unlockPixels() failed with result code "
+ << result;
+ return 0L;
+ }
+ mediapipe::Packet packet = mediapipe::Adopt(image_frame.release());
+ return CreatePacketWithContext(context, packet);
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.h
new file mode 100644
index 000000000..a1fc587d9
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.h
@@ -0,0 +1,39 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_PACKET_CREATOR_JNI_H_
+#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_PACKET_CREATOR_JNI_H_
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif // __cplusplus
+
+#define ANDROID_PACKET_CREATOR_METHOD(METHOD_NAME) \
+ Java_com_google_mediapipe_framework_AndroidPacketCreator_##METHOD_NAME
+
+JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD(
+ nativeCreateRgbImageFrame)(JNIEnv* env, jobject thiz, jlong context,
+ jobject bitmap);
+
+JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD(
+ nativeCreateRgbaImageFrame)(JNIEnv* env, jobject thiz, jlong context,
+ jobject bitmap);
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_PACKET_CREATOR_JNI_H_
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/colorspace.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/colorspace.h
new file mode 100644
index 000000000..f5ad09acd
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/colorspace.h
@@ -0,0 +1,60 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COLORSPACE_H_
+#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COLORSPACE_H_
+
+#include <cstdint>
+
+namespace mediapipe {
+namespace android {
+// TODO: switch to more efficient implementation, like halide later.
+
+// Converts an RGBA image to RGB
+inline void RgbaToRgb(const uint8_t* rgba_img, int rgba_width_step, int width,
+ int height, uint8_t* rgb_img, int rgb_width_step) {
+ for (int y = 0; y < height; ++y) {
+ const auto* rgba = rgba_img + y * rgba_width_step;
+ auto* rgb = rgb_img + y * rgb_width_step;
+ for (int x = 0; x < width; ++x) {
+ *rgb = *rgba;
+ *(rgb + 1) = *(rgba + 1);
+ *(rgb + 2) = *(rgba + 2);
+ rgb += 3;
+ rgba += 4;
+ }
+ }
+}
+
+// Converts a RGB image to RGBA
+inline void RgbToRgba(const uint8_t* rgb_img, int rgb_width_step, int width,
+ int height, uint8_t* rgba_img, int rgba_width_step,
+ uint8_t alpha) {
+ for (int y = 0; y < height; ++y) {
+ const auto* rgb = rgb_img + y * rgb_width_step;
+ auto* rgba = rgba_img + y * rgba_width_step;
+ for (int x = 0; x < width; ++x) {
+ *rgba = *rgb;
+ *(rgba + 1) = *(rgb + 1);
+ *(rgba + 2) = *(rgb + 2);
+ *(rgba + 3) = alpha;
+ rgb += 3;
+ rgba += 4;
+ }
+ }
+}
+
+} // namespace android
+} // namespace mediapipe
+#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COLORSPACE_H_
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.cc
new file mode 100644
index 000000000..894116848
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.cc
@@ -0,0 +1,27 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/java/com/google/mediapipe/framework/jni/compat_jni.h"
+
+#include <EGL/egl.h>
+
+JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLContext)(JNIEnv* env,
+ jclass clz) {
+  return reinterpret_cast<jlong>(eglGetCurrentContext());
+}
+
+JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLSurface)(
+ JNIEnv* env, jclass clz, jint readdraw) {
+  return reinterpret_cast<jlong>(eglGetCurrentSurface(readdraw));
+}
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.h
new file mode 100644
index 000000000..2d12cc6cd
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/compat_jni.h
@@ -0,0 +1,37 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COMPAT_JNI_H_
+#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COMPAT_JNI_H_
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif // __cplusplus
+
+#define COMPAT_METHOD(METHOD_NAME) \
+ Java_com_google_mediapipe_framework_Compat_##METHOD_NAME
+
+JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLContext)(JNIEnv* env,
+ jclass clz);
+
+JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLSurface)(
+ JNIEnv* env, jclass clz, jint readdraw);
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COMPAT_JNI_H_
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.cc b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.cc
new file mode 100644
index 000000000..e26123c1c
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.cc
@@ -0,0 +1,600 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
+
+#include <pthread.h>
+
+#include <string>
+
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_format.h"
+#include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/port/canonical_errors.h"
+#include "mediapipe/framework/port/logging.h"
+#include "mediapipe/framework/port/proto_ns.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/framework/port/threadpool.h"
+#include "mediapipe/framework/tool/executor_util.h"
+#include "mediapipe/framework/tool/name_util.h"
+#include "mediapipe/gpu/gpu_shared_data_internal.h"
+#include "mediapipe/gpu/graph_support.h"
+#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
+#include "mediapipe/java/com/google/mediapipe/framework/jni/packet_context_jni.h"
+#ifdef __ANDROID__
+#include "mediapipe/util/android/file/base/helpers.h"
+#else
+#include "mediapipe/framework/port/file_helpers.h"
+#endif // __ANDROID__
+#ifndef MEDIAPIPE_DISABLE_GPU
+#include "mediapipe/gpu/egl_surface_holder.h"
+#endif // !defined(MEDIAPIPE_DISABLE_GPU)
+
+namespace mediapipe {
+namespace android {
+
+namespace internal {
+// PacketWithContext is the native counterpart of the Java Packet.
+class PacketWithContext {
+ public:
+ PacketWithContext(Graph* context, const Packet& packet)
+ : context_(context), packet_(packet) {}
+
+ ~PacketWithContext() {}
+
+ Graph* GetContext() { return context_; }
+
+ Packet& packet() { return packet_; }
+
+ private:
+ Graph* context_;
+ Packet packet_;
+};
+
+// A callback handler that wraps the java callback, and submits it for
+// execution through Graph.
+class CallbackHandler {
+ public:
+ CallbackHandler(Graph* context, jobject callback)
+ : context_(context), java_callback_(callback) {}
+
+ ~CallbackHandler() {
+ // The jobject global reference is managed by the Graph directly.
+ // So no-op here.
+ if (java_callback_) {
+ LOG(ERROR) << "Java callback global reference is not released.";
+ }
+ }
+
+ void PacketCallback(const Packet& packet) {
+ context_->CallbackToJava(mediapipe::java::GetJNIEnv(), java_callback_,
+ packet);
+ }
+
+ void PacketWithHeaderCallback(const Packet& packet, const Packet& header) {
+ context_->CallbackToJava(mediapipe::java::GetJNIEnv(), java_callback_,
+ packet, header);
+ }
+
+  std::function<void(const Packet&)> CreateCallback() {
+ return std::bind(&CallbackHandler::PacketCallback, this,
+ std::placeholders::_1);
+ }
+
+  std::function<void(const Packet&, const Packet&)> CreateCallbackWithHeader() {
+ return std::bind(&CallbackHandler::PacketWithHeaderCallback, this,
+ std::placeholders::_1, std::placeholders::_2);
+ }
+
+ // Releases the global reference to the java callback object.
+ // This is called by the Graph, since releasing of a jni object
+ // requires JNIEnv object that we can not keep a copy of.
+ void ReleaseCallback(JNIEnv* env) {
+ env->DeleteGlobalRef(java_callback_);
+ java_callback_ = nullptr;
+ }
+
+ private:
+ Graph* context_;
+ // java callback object
+ jobject java_callback_;
+};
+} // namespace internal
+
+Graph::Graph()
+ : executor_stack_size_increased_(false), global_java_packet_cls_(nullptr) {}
+
+Graph::~Graph() {
+ if (running_graph_) {
+ running_graph_->Cancel();
+ running_graph_->WaitUntilDone().IgnoreError();
+ }
+ // Cleans up the jni objects.
+ JNIEnv* env = mediapipe::java::GetJNIEnv();
+ if (env == nullptr) {
+ LOG(ERROR) << "Can't attach to java thread, no jni clean up performed.";
+ return;
+ }
+ for (const auto& handler : callback_handlers_) {
+ handler->ReleaseCallback(env);
+ }
+ if (global_java_packet_cls_) {
+ env->DeleteGlobalRef(global_java_packet_cls_);
+ global_java_packet_cls_ = nullptr;
+ }
+}
+
+int64_t Graph::WrapPacketIntoContext(const Packet& packet) {
+ absl::MutexLock lock(&all_packets_mutex_);
+ auto packet_context = new internal::PacketWithContext(this, packet);
+ // Since the value of the all_packets_ map is a unique_ptr, resets it with the
+ // new allocated object.
+ all_packets_[packet_context].reset(packet_context);
+ VLOG(2) << "Graph packet reference buffer size: " << all_packets_.size();
+  return reinterpret_cast<int64_t>(packet_context);
+}
+
+// static
+Packet Graph::GetPacketFromHandle(int64_t packet_handle) {
+ internal::PacketWithContext* packet_with_context =
+      reinterpret_cast<internal::PacketWithContext*>(packet_handle);
+ return packet_with_context->packet();
+}
+
+// static
+Graph* Graph::GetContextFromHandle(int64_t packet_handle) {
+ internal::PacketWithContext* packet_with_context =
+      reinterpret_cast<internal::PacketWithContext*>(packet_handle);
+ return packet_with_context->GetContext();
+}
+
+// static
+bool Graph::RemovePacket(int64_t packet_handle) {
+ internal::PacketWithContext* packet_with_context =
+      reinterpret_cast<internal::PacketWithContext*>(packet_handle);
+ Graph* context = packet_with_context->GetContext();
+ absl::MutexLock lock(&(context->all_packets_mutex_));
+ return context->all_packets_.erase(packet_with_context) != 0;
+}
+
+void Graph::EnsureMinimumExecutorStackSizeForJava() {}
+
+::mediapipe::Status Graph::AddCallbackHandler(std::string output_stream_name,
+ jobject java_callback) {
+ if (!graph_config()) {
+ return ::mediapipe::InternalError("Graph is not loaded!");
+ }
+  std::unique_ptr<internal::CallbackHandler> handler(
+ new internal::CallbackHandler(this, java_callback));
+ std::string side_packet_name;
+ tool::AddCallbackCalculator(output_stream_name, graph_config(),
+ &side_packet_name,
+ /* use_std_function = */ true);
+ EnsureMinimumExecutorStackSizeForJava();
+ side_packets_callbacks_.emplace(
+      side_packet_name, MakePacket<std::function<void(const Packet&)>>(
+ handler->CreateCallback()));
+ callback_handlers_.emplace_back(std::move(handler));
+ return ::mediapipe::OkStatus();
+}
+
+::mediapipe::Status Graph::AddCallbackWithHeaderHandler(
+ std::string output_stream_name, jobject java_callback) {
+ if (!graph_config()) {
+ return ::mediapipe::InternalError("Graph is not loaded!");
+ }
+  std::unique_ptr<internal::CallbackHandler> handler(
+ new internal::CallbackHandler(this, java_callback));
+ std::string side_packet_name;
+ tool::AddCallbackWithHeaderCalculator(output_stream_name, output_stream_name,
+ graph_config(), &side_packet_name,
+ /* use_std_function = */ true);
+ EnsureMinimumExecutorStackSizeForJava();
+ side_packets_callbacks_.emplace(
+ side_packet_name,
+      MakePacket<std::function<void(const Packet&, const Packet&)>>(
+ handler->CreateCallbackWithHeader()));
+ callback_handlers_.emplace_back(std::move(handler));
+ return ::mediapipe::OkStatus();
+}
+
+int64_t Graph::AddSurfaceOutput(const std::string& output_stream_name) {
+ if (!graph_config()) {
+ LOG(ERROR) << "Graph is not loaded!";
+ return 0;
+ }
+
+#ifdef MEDIAPIPE_DISABLE_GPU
+ LOG(FATAL) << "GPU support has been disabled in this build!";
+#else
+ CalculatorGraphConfig::Node* sink_node = graph_config()->add_node();
+ sink_node->set_name(::mediapipe::tool::GetUnusedNodeName(
+ *graph_config(), absl::StrCat("egl_surface_sink_", output_stream_name)));
+ sink_node->set_calculator("GlSurfaceSinkCalculator");
+ sink_node->add_input_stream(output_stream_name);
+ sink_node->add_input_side_packet(
+ absl::StrCat(kGpuSharedTagName, ":", kGpuSharedSidePacketName));
+
+ const std::string input_side_packet_name =
+ ::mediapipe::tool::GetUnusedSidePacketName(
+ *graph_config(), absl::StrCat(output_stream_name, "_surface"));
+ sink_node->add_input_side_packet(
+ absl::StrCat("SURFACE:", input_side_packet_name));
+
+ auto it_inserted = output_surface_side_packets_.emplace(
+ input_side_packet_name,
+ AdoptAsUniquePtr(new mediapipe::EglSurfaceHolder()));
+
+ return WrapPacketIntoContext(it_inserted.first->second);
+#endif // defined(MEDIAPIPE_DISABLE_GPU)
+}
+
+::mediapipe::Status Graph::LoadBinaryGraph(std::string path_to_graph) {
+ std::string graph_config_string;
+ ::mediapipe::Status status =
+ mediapipe::file::GetContents(path_to_graph, &graph_config_string);
+ if (!status.ok()) {
+ return status;
+ }
+ return LoadBinaryGraph(graph_config_string.c_str(),
+ graph_config_string.length());
+}
+
+::mediapipe::Status Graph::LoadBinaryGraph(const char* data, int size) {
+ CalculatorGraphConfig graph_config;
+ if (!graph_config.ParseFromArray(data, size)) {
+ return ::mediapipe::InvalidArgumentError("Failed to parse the graph");
+ }
+ graph_configs_.push_back(graph_config);
+ return ::mediapipe::OkStatus();
+}
+
+::mediapipe::Status Graph::LoadBinaryGraphTemplate(const char* data, int size) {
+ CalculatorGraphTemplate graph_template;
+ if (!graph_template.ParseFromArray(data, size)) {
+ return ::mediapipe::InvalidArgumentError("Failed to parse the graph");
+ }
+ graph_templates_.push_back(graph_template);
+ return ::mediapipe::OkStatus();
+}
+
+::mediapipe::Status Graph::SetGraphType(std::string graph_type) {
+ graph_type_ = graph_type;
+ return ::mediapipe::OkStatus();
+}
+
+::mediapipe::Status Graph::SetGraphOptions(const char* data, int size) {
+ if (!graph_options_.ParseFromArray(data, size)) {
+ return ::mediapipe::InvalidArgumentError("Failed to parse the graph");
+ }
+ return ::mediapipe::OkStatus();
+}
+
+CalculatorGraphConfig Graph::GetCalculatorGraphConfig() {
+ CalculatorGraph temp_graph;
+ ::mediapipe::Status status = InitializeGraph(&temp_graph);
+ if (!status.ok()) {
+ LOG(ERROR) << "GetCalculatorGraphConfig failed:\n" << status.message();
+ }
+ return temp_graph.Config();
+}
+
+void Graph::CallbackToJava(JNIEnv* env, jobject java_callback_obj,
+ const Packet& packet) {
+ jclass callback_cls = env->GetObjectClass(java_callback_obj);
+ jmethodID processMethod = env->GetMethodID(
+ callback_cls, "process",
+ absl::StrFormat("(L%s;)V", std::string(Graph::kJavaPacketClassName))
+ .c_str());
+
+ int64_t packet_handle = WrapPacketIntoContext(packet);
+ // Creates a Java Packet.
+ VLOG(2) << "Creating java packet preparing for callback to java.";
+ jobject java_packet =
+ CreateJavaPacket(env, global_java_packet_cls_, packet_handle);
+ VLOG(2) << "Calling java callback.";
+ env->CallVoidMethod(java_callback_obj, processMethod, java_packet);
+ // release the packet after callback.
+ RemovePacket(packet_handle);
+ env->DeleteLocalRef(callback_cls);
+ env->DeleteLocalRef(java_packet);
+ VLOG(2) << "Returned from java callback.";
+}
+
+void Graph::CallbackToJava(JNIEnv* env, jobject java_callback_obj,
+ const Packet& packet, const Packet& header_packet) {
+ jclass callback_cls = env->GetObjectClass(java_callback_obj);
+ jmethodID processMethod = env->GetMethodID(
+ callback_cls, "process",
+ absl::StrFormat("(L%s;L%s;)V", std::string(Graph::kJavaPacketClassName),
+ std::string(Graph::kJavaPacketClassName))
+ .c_str());
+
+ int64_t packet_handle = WrapPacketIntoContext(packet);
+ int64_t header_packet_handle = WrapPacketIntoContext(header_packet);
+ // Creates a Java Packet.
+ jobject java_packet =
+ CreateJavaPacket(env, global_java_packet_cls_, packet_handle);
+ jobject java_header_packet =
+ CreateJavaPacket(env, global_java_packet_cls_, header_packet_handle);
+ env->CallVoidMethod(java_callback_obj, processMethod, java_packet,
+ java_header_packet);
+ // release the packet after callback.
+ RemovePacket(packet_handle);
+ RemovePacket(header_packet_handle);
+ env->DeleteLocalRef(callback_cls);
+ env->DeleteLocalRef(java_packet);
+ env->DeleteLocalRef(java_header_packet);
+}
+
+void Graph::SetPacketJavaClass(JNIEnv* env) {
+ if (global_java_packet_cls_ == nullptr) {
+ jclass packet_cls =
+ env->FindClass(mediapipe::android::Graph::kJavaPacketClassName);
+ global_java_packet_cls_ =
+        reinterpret_cast<jclass>(env->NewGlobalRef(packet_cls));
+ }
+}
+
+::mediapipe::Status Graph::RunGraphUntilClose(JNIEnv* env) {
+ // Get a global reference to the packet class, so it can be used in other
+ // native thread for call back.
+ SetPacketJavaClass(env);
+ // Running as a synchronized mode, the same Java thread is available through
+ // out the run.
+ CalculatorGraph calculator_graph;
+ ::mediapipe::Status status = InitializeGraph(&calculator_graph);
+ if (!status.ok()) {
+ LOG(ERROR) << status.message();
+ running_graph_.reset(nullptr);
+ return status;
+ }
+ // TODO: gpu & services set up!
+ status = calculator_graph.Run(CreateCombinedSidePackets());
+ LOG(INFO) << "Graph run finished.";
+
+ return status;
+}
+
+::mediapipe::Status Graph::StartRunningGraph(JNIEnv* env) {
+ if (running_graph_) {
+ return ::mediapipe::InternalError("Graph is already running.");
+ }
+ // Get a global reference to the packet class, so it can be used in other
+ // native thread for call back.
+ SetPacketJavaClass(env);
+ // Running as a synchronized mode, the same Java thread is available
+ // throughout the run.
+ running_graph_.reset(new CalculatorGraph());
+ // Set the mode for adding packets to graph input streams.
+ running_graph_->SetGraphInputStreamAddMode(graph_input_stream_add_mode_);
+ if (VLOG_IS_ON(2)) {
+ LOG(INFO) << "input packet streams:";
+ for (auto& name : graph_config()->input_stream()) {
+ LOG(INFO) << name;
+ }
+ }
+ ::mediapipe::Status status;
+#ifndef MEDIAPIPE_DISABLE_GPU
+ status = running_graph_->SetGpuResources(gpu_resources_);
+ if (!status.ok()) {
+ LOG(ERROR) << status.message();
+ running_graph_.reset(nullptr);
+ return status;
+ }
+#endif // !defined(MEDIAPIPE_DISABLE_GPU)
+
+ for (const auto& service_packet : service_packets_) {
+ status = running_graph_->SetServicePacket(*service_packet.first,
+ service_packet.second);
+ if (!status.ok()) {
+ LOG(ERROR) << status.message();
+ running_graph_.reset(nullptr);
+ return status;
+ }
+ }
+
+ status = InitializeGraph(running_graph_.get());
+ if (!status.ok()) {
+ LOG(ERROR) << status.message();
+ running_graph_.reset(nullptr);
+ return status;
+ }
+ LOG(INFO) << "Start running the graph, waiting for inputs.";
+ status =
+ running_graph_->StartRun(CreateCombinedSidePackets(), stream_headers_);
+ if (!status.ok()) {
+ LOG(ERROR) << status;
+ running_graph_.reset(nullptr);
+ return status;
+ }
+ return mediapipe::OkStatus();
+}
+
+::mediapipe::Status Graph::SetTimestampAndMovePacketToInputStream(
+ const std::string& stream_name, int64_t packet_handle, int64_t timestamp) {
+ internal::PacketWithContext* packet_with_context =
+      reinterpret_cast<internal::PacketWithContext*>(packet_handle);
+ Packet& packet = packet_with_context->packet();
+
+ // Set the timestamp of the packet in-place by calling the rvalue-reference
+ // version of At here.
+ packet = std::move(packet).At(Timestamp(timestamp));
+
+ // Then std::move it into the input stream.
+ return AddPacketToInputStream(stream_name, std::move(packet));
+}
+
+::mediapipe::Status Graph::AddPacketToInputStream(
+ const std::string& stream_name, const Packet& packet) {
+ if (!running_graph_) {
+ return ::mediapipe::FailedPreconditionError("Graph must be running.");
+ }
+
+ return running_graph_->AddPacketToInputStream(stream_name, packet);
+}
+
+::mediapipe::Status Graph::AddPacketToInputStream(
+ const std::string& stream_name, Packet&& packet) {
+ if (!running_graph_) {
+ return ::mediapipe::FailedPreconditionError("Graph must be running.");
+ }
+
+ return running_graph_->AddPacketToInputStream(stream_name, std::move(packet));
+}
+
+::mediapipe::Status Graph::CloseInputStream(std::string stream_name) {
+ if (!running_graph_) {
+ return ::mediapipe::FailedPreconditionError("Graph must be running.");
+ }
+ LOG(INFO) << "Close input stream: " << stream_name;
+ return running_graph_->CloseInputStream(stream_name);
+}
+
+::mediapipe::Status Graph::CloseAllInputStreams() {
+ LOG(INFO) << "Close all input streams.";
+ if (!running_graph_) {
+ return ::mediapipe::FailedPreconditionError("Graph must be running.");
+ }
+ return running_graph_->CloseAllInputStreams();
+}
+
+::mediapipe::Status Graph::CloseAllPacketSources() {
+ LOG(INFO) << "Close all input streams.";
+ if (!running_graph_) {
+ return ::mediapipe::FailedPreconditionError("Graph must be running.");
+ }
+ return running_graph_->CloseAllPacketSources();
+}
+
+::mediapipe::Status Graph::WaitUntilDone(JNIEnv* env) {
+ if (!running_graph_) {
+ return ::mediapipe::FailedPreconditionError("Graph must be running.");
+ }
+ ::mediapipe::Status status = running_graph_->WaitUntilDone();
+ running_graph_.reset(nullptr);
+ return status;
+}
+
+::mediapipe::Status Graph::WaitUntilIdle(JNIEnv* env) {
+ if (!running_graph_) {
+ return ::mediapipe::FailedPreconditionError("Graph must be running.");
+ }
+ return running_graph_->WaitUntilIdle();
+}
+
+void Graph::SetInputSidePacket(const std::string& stream_name,
+ const Packet& packet) {
+ side_packets_[stream_name] = packet;
+}
+
+void Graph::SetStreamHeader(const std::string& stream_name,
+ const Packet& packet) {
+ stream_headers_[stream_name] = packet;
+ LOG(INFO) << stream_name << " stream header being set.";
+}
+
+void Graph::SetGraphInputStreamAddMode(
+ CalculatorGraph::GraphInputStreamAddMode mode) {
+ graph_input_stream_add_mode_ = mode;
+}
+
+mediapipe::GpuResources* Graph::GetGpuResources() const {
+ return gpu_resources_.get();
+}
+
+::mediapipe::Status Graph::SetParentGlContext(int64 java_gl_context) {
+ if (gpu_resources_) {
+ return ::mediapipe::AlreadyExistsError(
+ "trying to set the parent GL context, but the gpu shared "
+ "data has already been set up.");
+ }
+#ifdef MEDIAPIPE_DISABLE_GPU
+ LOG(FATAL) << "GPU support has been disabled in this build!";
+#else
+ gpu_resources_ = mediapipe::GpuResources::Create(
+                       reinterpret_cast<EGLContext>(java_gl_context))
+ .ValueOrDie();
+#endif // defined(MEDIAPIPE_DISABLE_GPU)
+ return ::mediapipe::OkStatus();
+}
+
+void Graph::SetServicePacket(const GraphServiceBase& service, Packet packet) {
+ service_packets_[&service] = std::move(packet);
+}
+
+void Graph::CancelGraph() {
+ if (running_graph_) {
+ running_graph_->Cancel();
+ }
+}
+
+std::map<std::string, Packet> Graph::CreateCombinedSidePackets() {
+  std::map<std::string, Packet> combined_side_packets = side_packets_callbacks_;
+ combined_side_packets.insert(side_packets_.begin(), side_packets_.end());
+ combined_side_packets.insert(output_surface_side_packets_.begin(),
+ output_surface_side_packets_.end());
+ return combined_side_packets;
+}
+
+ProfilingContext* Graph::GetProfilingContext() {
+ if (running_graph_) {
+ return running_graph_->profiler();
+ }
+ return nullptr;
+}
+
+CalculatorGraphConfig* Graph::graph_config() {
+ // Return the last specified graph config with the required graph_type.
+ for (auto it = graph_configs_.rbegin(); it != graph_configs_.rend(); ++it) {
+ if (it->type() == graph_type()) {
+ return &*it;
+ }
+ }
+ for (auto it = graph_templates_.rbegin(); it != graph_templates_.rend();
+ ++it) {
+ if (it->mutable_config()->type() == graph_type()) {
+ return it->mutable_config();
+ }
+ }
+ return nullptr;
+}
+
+std::string Graph::graph_type() {
+ // If a graph-type is specified, that type is used. Otherwise the
+ // graph-type of the last specified graph config is used.
+ if (graph_type_ != "") {
+ return graph_type_;
+ }
+ if (!graph_configs_.empty()) {
+ return graph_configs_.back().type();
+ }
+ if (!graph_templates_.empty()) {
+ return graph_templates_.back().config().type();
+ }
+ return "";
+}
+
+::mediapipe::Status Graph::InitializeGraph(CalculatorGraph* graph) {
+ if (graph_configs_.size() == 1 && graph_templates_.empty()) {
+ return graph->Initialize(*graph_config());
+ } else {
+ return graph->Initialize(graph_configs_, graph_templates_, {}, graph_type(),
+ &graph_options_);
+ }
+}
+
+} // namespace android
+} // namespace mediapipe
diff --git a/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.h b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.h
new file mode 100644
index 000000000..c6f64b6fe
--- /dev/null
+++ b/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpuAndroidStudioProject/app/src/main/java/com/google/mediapipe/framework/jni/graph.h
@@ -0,0 +1,247 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_H_
+#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_H_
+
+#include <jni.h>
+
+#include <map>