Add files via upload

commit 135ef86b87 (parent 5bc989d4de)
Ali Zahid Raja, 2019-10-16 19:26:24 +05:00; committed by GitHub
100 changed files with 22711 additions and 0 deletions

Hand Tracking GPU.iml (root IntelliJ IDEA module file)
@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<module external.linked.project.id="Hand Tracking GPU" external.linked.project.path="$MODULE_DIR$" external.root.project.path="$MODULE_DIR$" external.system.id="GRADLE" type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="java-gradle" name="Java-Gradle">
<configuration>
<option name="BUILD_FOLDER_PATH" value="$MODULE_DIR$/build" />
<option name="BUILDABLE" value="false" />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.gradle" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

app/app.iml (IntelliJ IDEA module file for the :app Gradle module)
@@ -0,0 +1,157 @@
<?xml version="1.0" encoding="UTF-8"?>
<module external.linked.project.id=":app" external.linked.project.path="$MODULE_DIR$" external.root.project.path="$MODULE_DIR$/.." external.system.id="GRADLE" type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="android-gradle" name="Android-Gradle">
<configuration>
<option name="GRADLE_PROJECT_PATH" value=":app" />
<option name="LAST_SUCCESSFUL_SYNC_AGP_VERSION" value="3.5.1" />
<option name="LAST_KNOWN_AGP_VERSION" value="3.5.1" />
</configuration>
</facet>
<facet type="android" name="Android">
<configuration>
<option name="SELECTED_BUILD_VARIANT" value="debug" />
<option name="ASSEMBLE_TASK_NAME" value="assembleDebug" />
<option name="COMPILE_JAVA_TASK_NAME" value="compileDebugSources" />
<afterSyncTasks>
<task>generateDebugSources</task>
</afterSyncTasks>
<option name="ALLOW_USER_CONFIGURATION" value="false" />
<option name="MANIFEST_FILE_RELATIVE_PATH" value="/src/main/AndroidManifest.xml" />
<option name="RES_FOLDER_RELATIVE_PATH" value="/src/main/res" />
<option name="RES_FOLDERS_RELATIVE_PATH" value="file://$MODULE_DIR$/src/main/res;file://$MODULE_DIR$/build/generated/res/resValues/debug" />
<option name="TEST_RES_FOLDERS_RELATIVE_PATH" value="" />
<option name="ASSETS_FOLDER_RELATIVE_PATH" value="/src/main/assets" />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
<output url="file://$MODULE_DIR$/build/intermediates/javac/debug/classes" />
<output-test url="file://$MODULE_DIR$/build/intermediates/javac/debugUnitTest/classes" />
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/build/generated/ap_generated_sources/debug/out" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/aidl_source_output_dir/debug/compileDebugAidl/out" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/buildConfig/debug" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/renderscript_source_output_dir/debug/compileDebugRenderscript/out" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/rs/debug" type="java-resource" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/resValues/debug" type="java-resource" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/ap_generated_sources/debugAndroidTest/out" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/aidl_source_output_dir/debugAndroidTest/compileDebugAndroidTestAidl/out" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/buildConfig/androidTest/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/renderscript_source_output_dir/debugAndroidTest/compileDebugAndroidTestRenderscript/out" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/rs/androidTest/debug" type="java-test-resource" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/resValues/androidTest/debug" type="java-test-resource" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/ap_generated_sources/debugUnitTest/out" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/res" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/resources" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/assets" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/aidl" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/rs" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/shaders" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/assets" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/aidl" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/shaders" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/testDebug/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/testDebug/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/testDebug/assets" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/testDebug/aidl" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/testDebug/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/testDebug/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/testDebug/shaders" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/main/res" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/main/resources" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/main/assets" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/main/aidl" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/rs" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/shaders" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/assets" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/aidl" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/shaders" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/assets" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/aidl" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/shaders" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/build" />
</content>
<orderEntry type="jdk" jdkName="Android API 29 Platform" jdkType="Android SDK" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" scope="TEST" name="Gradle: junit:junit:4.12@jar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: org.hamcrest:hamcrest-integration:1.3@jar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: org.hamcrest:hamcrest-library:1.3@jar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: org.hamcrest:hamcrest-core:1.3@jar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: net.sf.kxml:kxml2:2.3.0@jar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: com.squareup:javawriter:2.1.1@jar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: javax.inject:javax.inject:1@jar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: androidx.test.ext:junit:1.1.0@aar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: androidx.test.espresso:espresso-core:3.1.1@aar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: androidx.test:runner:1.1.1@aar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: androidx.test:core:1.1.0@aar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: androidx.test:monitor:1.1.1@aar" level="project" />
<orderEntry type="library" scope="TEST" name="Gradle: androidx.test.espresso:espresso-idling-resource:3.1.1@aar" level="project" />
<orderEntry type="library" name="Gradle: org.glassfish:javax.annotation:10.0-b28@jar" level="project" />
<orderEntry type="library" name="Gradle: androidx.collection:collection:1.0.0@jar" level="project" />
<orderEntry type="library" name="Gradle: androidx.lifecycle:lifecycle-common:2.1.0@jar" level="project" />
<orderEntry type="library" name="Gradle: androidx.concurrent:concurrent-futures:1.0.0-alpha03@jar" level="project" />
<orderEntry type="library" name="Gradle: androidx.arch.core:core-common:2.1.0@jar" level="project" />
<orderEntry type="library" name="Gradle: androidx.annotation:annotation:1.1.0@jar" level="project" />
<orderEntry type="library" name="Gradle: androidx.constraintlayout:constraintlayout-solver:1.1.3@jar" level="project" />
<orderEntry type="library" name="Gradle: com.google.guava:guava:28.1-jre@jar" level="project" />
<orderEntry type="library" name="Gradle: com.google.guava:failureaccess:1.0.1@jar" level="project" />
<orderEntry type="library" name="Gradle: com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava@jar" level="project" />
<orderEntry type="library" name="Gradle: com.google.flogger:flogger:0.4@jar" level="project" />
<orderEntry type="library" name="Gradle: com.google.code.findbugs:jsr305:3.0.2@jar" level="project" />
<orderEntry type="library" name="Gradle: org.checkerframework:checker-qual:2.8.1@jar" level="project" />
<orderEntry type="library" name="Gradle: com.google.errorprone:error_prone_annotations:2.3.2@jar" level="project" />
<orderEntry type="library" name="Gradle: com.google.j2objc:j2objc-annotations:1.3@jar" level="project" />
<orderEntry type="library" name="Gradle: org.codehaus.mojo:animal-sniffer-annotations:1.18@jar" level="project" />
<orderEntry type="library" name="Gradle: com.google.protobuf:protobuf-java:3.5.1@jar" level="project" />
<orderEntry type="library" name="Gradle: org.jetbrains:annotations:15.0@jar" level="project" />
<orderEntry type="library" name="Gradle: com.google.auto.value:auto-value-annotations:1.6.3@jar" level="project" />
<orderEntry type="library" name="Gradle: androidx.appcompat:appcompat:1.0.2@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.camera:camera-extensions:1.0.0-alpha03@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.camera:camera-camera2:1.0.0-alpha06@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.camera:camera-view:1.0.0-alpha03@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.camera:camera-core:1.0.0-alpha06@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.fragment:fragment:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.vectordrawable:vectordrawable-animated:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.legacy:legacy-support-core-ui:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.legacy:legacy-support-core-utils:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.vectordrawable:vectordrawable:1.0.1@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.loader:loader:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.viewpager:viewpager:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.coordinatorlayout:coordinatorlayout:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.drawerlayout:drawerlayout:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.slidingpanelayout:slidingpanelayout:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.customview:customview:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.swiperefreshlayout:swiperefreshlayout:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.asynclayoutinflater:asynclayoutinflater:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.core:core:1.1.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.versionedparcelable:versionedparcelable:1.1.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.cursoradapter:cursoradapter:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.lifecycle:lifecycle-runtime:2.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.lifecycle:lifecycle-livedata:2.1.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.lifecycle:lifecycle-livedata-core:2.1.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.exifinterface:exifinterface:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.documentfile:documentfile:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.localbroadcastmanager:localbroadcastmanager:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.print:print:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.lifecycle:lifecycle-viewmodel:2.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.arch.core:core-runtime:2.1.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.interpolator:interpolator:1.0.0@aar" level="project" />
<orderEntry type="library" name="Gradle: androidx.constraintlayout:constraintlayout:1.1.3@aar" level="project" />
</component>
</module>

app/build.gradle
@@ -0,0 +1,53 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion 29
buildToolsVersion "29.0.2"
defaultConfig {
applicationId "com.example.handtrackinggpu"
minSdkVersion 21
targetSdkVersion 29
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'androidx.appcompat:appcompat:1.0.2'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.0'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1'
implementation "com.google.guava:guava:28.1-jre"
implementation "com.google.flogger:flogger:0.4"
implementation 'com.google.protobuf:protobuf-java:3.5.1'
compileOnly 'org.glassfish:javax.annotation:10.0-b28'
implementation 'androidx.annotation:annotation:1.1.0'
implementation 'org.jetbrains:annotations:15.0'
// implementation 'com.intellij:annotations:+@jar'
implementation "androidx.camera:camera-core:1.0.0-alpha06"
// If you want to use Camera2 extensions
implementation "androidx.camera:camera-camera2:1.0.0-alpha06"
// If you want to use the Camera View class
implementation "androidx.camera:camera-view:1.0.0-alpha03"
// If you want to use Camera Extensions
implementation "androidx.camera:camera-extensions:1.0.0-alpha03"
}
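MainActivity below loads libmediapipe_jni.so and libopencv_java4.so at runtime, but this build file does not say where those prebuilt libraries live. A hedged sketch of the conventional wiring (the libs path is an assumption, not part of this commit):
// Sketch only: if the .so files are not already under src/main/jniLibs/<abi>/,
// an android.sourceSets entry like this would tell the Android Gradle plugin
// where to find them so they get packaged into the APK.
android {
    sourceSets {
        main {
            jniLibs.srcDirs = ['libs']  // assumed location of the prebuilt native libraries
        }
    }
}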

app/proguard-rules.pro
@@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
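The file above is the stock template with every rule commented out. Because the MediaPipe Java classes are reached from native code (see the System.loadLibrary calls in MainActivity), a minified release build would likely also need a keep rule along these lines; this is an assumption, not part of the commit:
#-keep class com.google.mediapipe.** { *; }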

app/src/main/AndroidManifest.xml
@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.handtrackinggpu">
<!-- For using the camera -->
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<!-- For MediaPipe -->
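<!-- glEsVersion packs the major version into the high 16 bits and the minor
     version into the low 16 bits, so 0x00020000 below means OpenGL ES 2.0. -->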
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".MainActivity">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

MainActivity.java (package com.example.handtrackinggpu)
@@ -0,0 +1,173 @@
package com.example.handtrackinggpu;
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import android.graphics.SurfaceTexture;
import android.os.Build;
import android.os.Bundle;
import android.util.Size;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import com.google.mediapipe.components.CameraHelper;
import com.google.mediapipe.components.CameraXPreviewHelper;
import com.google.mediapipe.components.ExternalTextureConverter;
import com.google.mediapipe.components.FrameProcessor;
import com.google.mediapipe.components.PermissionHelper;
import com.google.mediapipe.framework.AndroidAssetUtil;
import com.google.mediapipe.glutil.EglManager;
/** Main activity of MediaPipe example apps. */
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
private static final String BINARY_GRAPH_NAME = "handtrackinggpu.binarypb";
private static final String INPUT_VIDEO_STREAM_NAME = "input_video";
private static final String OUTPUT_VIDEO_STREAM_NAME = "output_video";
private static final CameraHelper.CameraFacing CAMERA_FACING = CameraHelper.CameraFacing.FRONT;
// Flips the camera-preview frames vertically before sending them into FrameProcessor to be
// processed in a MediaPipe graph, and flips the processed frames back when they are displayed.
// This is needed because OpenGL represents images assuming the image origin is at the bottom-left
// corner, whereas MediaPipe in general assumes the image origin is at top-left.
private static final boolean FLIP_FRAMES_VERTICALLY = true;
static {
// Load all native libraries needed by the app.
System.loadLibrary("mediapipe_jni");
System.loadLibrary("opencv_java4");
}
// {@link SurfaceTexture} where the camera-preview frames can be accessed.
private SurfaceTexture previewFrameTexture;
// {@link SurfaceView} that displays the camera-preview frames processed by a MediaPipe graph.
private SurfaceView previewDisplayView;
// Creates and manages an {@link EGLContext}.
private EglManager eglManager;
// Sends camera-preview frames into a MediaPipe graph for processing, and displays the processed
// frames onto a {@link Surface}.
private FrameProcessor processor;
// Converts the GL_TEXTURE_EXTERNAL_OES texture from Android camera into a regular texture to be
// consumed by {@link FrameProcessor} and the underlying MediaPipe graph.
private ExternalTextureConverter converter;
// Handles camera access via the {@link CameraX} Jetpack support library.
private CameraXPreviewHelper cameraHelper;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
previewDisplayView = new SurfaceView(this);
setupPreviewDisplayView();
// Initialize asset manager so that MediaPipe native libraries can access the app assets, e.g.,
// binary graphs.
AndroidAssetUtil.initializeNativeAssetManager(this);
eglManager = new EglManager(null);
processor =
new FrameProcessor(
this,
eglManager.getNativeContext(),
BINARY_GRAPH_NAME,
INPUT_VIDEO_STREAM_NAME,
OUTPUT_VIDEO_STREAM_NAME);
processor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY);
PermissionHelper.checkAndRequestCameraPermissions(this);
}
@Override
protected void onResume() {
super.onResume();
converter = new ExternalTextureConverter(eglManager.getContext());
converter.setFlipY(FLIP_FRAMES_VERTICALLY);
converter.setConsumer(processor);
if (PermissionHelper.cameraPermissionsGranted(this)) {
startCamera();
}
}
@Override
protected void onPause() {
super.onPause();
converter.close();
}
@Override
public void onRequestPermissionsResult(
int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
private void setupPreviewDisplayView() {
previewDisplayView.setVisibility(View.GONE);
ViewGroup viewGroup = findViewById(R.id.preview_display_layout);
viewGroup.addView(previewDisplayView);
previewDisplayView
.getHolder()
.addCallback(
new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
processor.getVideoSurfaceOutput().setSurface(holder.getSurface());
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// (Re-)Compute the ideal size of the camera-preview display (the area that the
// camera-preview frames get rendered onto, potentially with scaling and rotation)
// based on the size of the SurfaceView that contains the display.
Size viewSize = new Size(width, height);
Size displaySize = cameraHelper.computeDisplaySizeFromViewSize(viewSize);
// Connect the converter to the camera-preview frames as its input (via
// previewFrameTexture), and configure the output width and height as the computed
// display size.
converter.setSurfaceTextureAndAttachToGLContext(
previewFrameTexture, displaySize.getWidth(), displaySize.getHeight());
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
processor.getVideoSurfaceOutput().setSurface(null);
}
});
}
private void startCamera() {
cameraHelper = new CameraXPreviewHelper();
cameraHelper.setOnCameraStartedListener(
surfaceTexture -> {
previewFrameTexture = surfaceTexture;
// Make the display view visible to start showing the preview. This triggers the
// SurfaceHolder.Callback added to (the holder of) previewDisplayView.
previewDisplayView.setVisibility(View.VISIBLE);
});
cameraHelper.startCamera(this, CAMERA_FACING, /*surfaceTexture=*/ null);
}
}
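The activity above inflates R.layout.activity_main and looks up R.id.preview_display_layout, but the layout file itself is not part of this excerpt. A minimal sketch consistent with that code (the container view type is an assumption; the id is taken from the code above):
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">
    <!-- setupPreviewDisplayView() adds the camera-preview SurfaceView into this container. -->
    <FrameLayout
        android:id="@+id/preview_display_layout"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />
</FrameLayout>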

BUILD (Bazel build targets for the com.google.mediapipe.components package)
@@ -0,0 +1,70 @@
# Copyright 2019 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
licenses(["notice"]) # Apache 2.0
android_library(
name = "android_components",
srcs = glob(
["*.java"],
exclude = [
"CameraHelper.java",
"CameraXPreviewHelper.java",
],
),
visibility = ["//visibility:public"],
deps = [
"//mediapipe/java/com/google/mediapipe/framework:android_framework",
"//mediapipe/java/com/google/mediapipe/glutil",
"//third_party:androidx_appcompat",
"//third_party:androidx_core",
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
],
)
# Note: We need to separate the camera helper files into a different BUILD target because CameraX has a minimum Android API
# requirement of API 21. Users of android_components may have different API dependencies.
android_library(
name = "android_camerax_helper",
srcs = [
"CameraHelper.java",
"CameraXPreviewHelper.java",
],
visibility = ["//visibility:public"],
deps = [
"//third_party:androidx_appcompat",
"//third_party:androidx_legacy_support_v4",
"//third_party:camera2",
"//third_party:camerax_core",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
],
)
android_library(
name = "android_microphone_helper",
srcs = [
"MicrophoneHelper.java",
],
visibility = ["//visibility:public"],
deps = [
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
],
)
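For context, a Bazel-built app would consume these targets roughly as follows; everything outside the two deps labels (rule name, srcs, manifest path) is an assumption:
# Hedged sketch of a consumer target; only the two labels in deps are
# actually defined in the BUILD file above.
android_binary(
    name = "handtrackinggpu",
    srcs = glob(["*.java"]),
    manifest = "AndroidManifest.xml",  # assumed location
    deps = [
        "//mediapipe/java/com/google/mediapipe/components:android_components",
        "//mediapipe/java/com/google/mediapipe/components:android_camerax_helper",
    ],
)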

CameraHelper.java (package com.google.mediapipe.components)
@@ -0,0 +1,63 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.components;
import android.app.Activity;
import android.graphics.SurfaceTexture;
import android.util.Size;
import javax.annotation.Nullable;
/** Abstract interface for a helper class that manages camera access. */
public abstract class CameraHelper {
/** The listener is called when camera start is complete. */
public interface OnCameraStartedListener {
/**
* Called when camera start is complete and the camera-preview frames can be accessed from the
* surfaceTexture. The surfaceTexture can be null if it is not prepared by the CameraHelper.
*/
public void onCameraStarted(@Nullable SurfaceTexture surfaceTexture);
}
protected static final String TAG = "CameraHelper";
/** Represents the direction the camera faces relative to device screen. */
public static enum CameraFacing {
FRONT,
BACK
}
protected OnCameraStartedListener onCameraStartedListener;
protected CameraFacing cameraFacing;
/**
* Initializes the camera and sets it up for accessing frames from a custom SurfaceTexture object.
* The SurfaceTexture object can be null when it is the CameraHelper that prepares a
* SurfaceTexture object for grabbing frames.
*/
public abstract void startCamera(
Activity context, CameraFacing cameraFacing, @Nullable SurfaceTexture surfaceTexture);
/**
* Computes the ideal size of the camera-preview display (the area that the camera-preview frames
* get rendered onto, potentially with scaling and rotation) based on the size of the view
* containing the display. Returns the computed display size.
*/
public abstract Size computeDisplaySizeFromViewSize(Size viewSize);
public void setOnCameraStartedListener(@Nullable OnCameraStartedListener listener) {
onCameraStartedListener = listener;
}
}

CameraXPreviewHelper.java (package com.google.mediapipe.components)
@@ -0,0 +1,102 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.components;
import android.app.Activity;
import androidx.lifecycle.LifecycleOwner;
import android.graphics.SurfaceTexture;
import android.util.Log;
import android.util.Size;
import androidx.camera.core.CameraX;
import androidx.camera.core.CameraX.LensFacing;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;
/**
* Uses CameraX APIs for camera setup and access.
*
* <p>{@link CameraX} connects to the camera and provides video frames.
*/
public class CameraXPreviewHelper extends CameraHelper {
private static final String TAG = "CameraXPreviewHelper";
private Preview preview;
// Size of the camera-preview frames from the camera.
private Size frameSize;
// Rotation of the camera-preview frames in degrees.
private int frameRotation;
@Override
@SuppressWarnings("RestrictTo") // See b/132705545.
public void startCamera(
Activity context, CameraFacing cameraFacing, SurfaceTexture surfaceTexture) {
LensFacing cameraLensFacing =
cameraFacing == CameraHelper.CameraFacing.FRONT ? LensFacing.FRONT : LensFacing.BACK;
PreviewConfig previewConfig =
new PreviewConfig.Builder().setLensFacing(cameraLensFacing).build();
preview = new Preview(previewConfig);
preview.setOnPreviewOutputUpdateListener(
previewOutput -> {
if (!previewOutput.getTextureSize().equals(frameSize)) {
frameSize = previewOutput.getTextureSize();
frameRotation = previewOutput.getRotationDegrees();
if (frameSize.getWidth() == 0 || frameSize.getHeight() == 0) {
// Invalid frame size. Wait for valid input dimensions before updating display size.
Log.d(TAG, "Invalid frameSize.");
return;
}
}
if (onCameraStartedListener != null) {
onCameraStartedListener.onCameraStarted(previewOutput.getSurfaceTexture());
}
});
CameraX.bindToLifecycle(/*lifecycleOwner=*/ (LifecycleOwner) context, preview);
}
@Override
public Size computeDisplaySizeFromViewSize(Size viewSize) {
if (viewSize == null || frameSize == null) {
// Wait for all inputs before setting display size.
Log.d(TAG, "viewSize or frameSize is null.");
return null;
}
// Valid rotation values are 0, 90, 180 and 270.
// Frames are rotated relative to the device's "natural" landscape orientation. When in portrait
// mode, valid rotation values are 90 or 270, and the width/height should be swapped to
// calculate aspect ratio.
float frameAspectRatio =
frameRotation == 90 || frameRotation == 270
? frameSize.getHeight() / (float) frameSize.getWidth()
: frameSize.getWidth() / (float) frameSize.getHeight();
float viewAspectRatio = viewSize.getWidth() / (float) viewSize.getHeight();
// Match shortest sides together.
int scaledWidth;
int scaledHeight;
if (frameAspectRatio < viewAspectRatio) {
scaledWidth = viewSize.getWidth();
scaledHeight = Math.round(viewSize.getWidth() / frameAspectRatio);
} else {
scaledHeight = viewSize.getHeight();
scaledWidth = Math.round(viewSize.getHeight() * frameAspectRatio);
}
return new Size(scaledWidth, scaledHeight);
}
}
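Worked example of computeDisplaySizeFromViewSize: for a 640x480 camera frame with frameRotation 90 shown in a 1080x1920 portrait view, frameAspectRatio = 480 / 640 = 0.75 and viewAspectRatio = 1080 / 1920 = 0.5625. Since 0.75 is not less than 0.5625, the else branch runs: scaledHeight = 1920 and scaledWidth = round(1920 * 0.75) = 1440, so the preview renders at 1440x1920 and overflows the view horizontally instead of letterboxing.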

ExternalTextureConverter.java (package com.google.mediapipe.components)
@@ -0,0 +1,373 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.components;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.util.Log;
import com.google.mediapipe.framework.AppTextureFrame;
import com.google.mediapipe.glutil.ExternalTextureRenderer;
import com.google.mediapipe.glutil.GlThread;
import com.google.mediapipe.glutil.ShaderUtil;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.microedition.khronos.egl.EGLContext;
/**
* Textures from {@link SurfaceTexture} are only supposed to be bound to target {@link
* GLES11Ext#GL_TEXTURE_EXTERNAL_OES}, which is accessed using samplerExternalOES in the shader.
* This means they cannot be used with a regular shader that expects a sampler2D. This class creates
* a copy of the texture that can be used with {@link GLES20#GL_TEXTURE_2D} and sampler2D.
*/
public class ExternalTextureConverter implements TextureFrameProducer {
private static final String TAG = "ExternalTextureConv"; // Max length of a tag is 23.
private static final int DEFAULT_NUM_BUFFERS = 2; // Number of output frames allocated.
private static final String THREAD_NAME = "ExternalTextureConverter";
private RenderThread thread;
/**
* Creates an ExternalTextureConverter that makes a working copy of each camera frame.
*
* @param numBuffers the number of camera frames that can enter processing simultaneously.
*/
public ExternalTextureConverter(EGLContext parentContext, int numBuffers) {
thread = new RenderThread(parentContext, numBuffers);
thread.setName(THREAD_NAME);
thread.start();
try {
thread.waitUntilReady();
} catch (InterruptedException ie) {
// Someone interrupted our thread. This is not supposed to happen: we own
// the thread, and we are not going to interrupt it. Therefore, it is not
// reasonable for this constructor to throw an InterruptedException
// (which is a checked exception). If it should somehow happen that the
// thread is interrupted, let's set the interrupted flag again, log the
// error, and throw a RuntimeException.
Thread.currentThread().interrupt();
Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage());
throw new RuntimeException(ie);
}
}
/**
* Sets vertical flipping of the texture, useful for conversion between coordinate systems with
* top-left vs. bottom-left origins. This should be called before {@link
* #setSurfaceTexture(SurfaceTexture, int, int)} or {@link
* #setSurfaceTextureAndAttachToGLContext(SurfaceTexture, int, int)}.
*/
public void setFlipY(boolean flip) {
thread.setFlipY(flip);
}
public ExternalTextureConverter(EGLContext parentContext) {
this(parentContext, DEFAULT_NUM_BUFFERS);
}
public ExternalTextureConverter(
EGLContext parentContext, SurfaceTexture texture, int targetWidth, int targetHeight) {
this(parentContext);
thread.setSurfaceTexture(texture, targetWidth, targetHeight);
}
/**
* Sets the input surface texture.
*
* <p>The provided width and height will be the size of the converted texture, so if the input
* surface texture is rotated (as expressed by its transformation matrix) the provided width and
* height should be swapped.
*/
// TODO: Clean up setSurfaceTexture methods.
public void setSurfaceTexture(SurfaceTexture texture, int width, int height) {
if (texture != null && (width == 0 || height == 0)) {
throw new RuntimeException(
"ExternalTextureConverter: setSurfaceTexture dimensions cannot be zero");
}
thread.getHandler().post(() -> thread.setSurfaceTexture(texture, width, height));
}
// TODO: Clean up setSurfaceTexture methods.
public void setSurfaceTextureAndAttachToGLContext(SurfaceTexture texture, int width, int height) {
if (texture != null && (width == 0 || height == 0)) {
throw new RuntimeException(
"ExternalTextureConverter: setSurfaceTexture dimensions cannot be zero");
}
thread
.getHandler()
.post(() -> thread.setSurfaceTextureAndAttachToGLContext(texture, width, height));
}
@Override
public void setConsumer(TextureFrameConsumer next) {
thread.setConsumer(next);
}
public void addConsumer(TextureFrameConsumer consumer) {
thread.addConsumer(consumer);
}
public void removeConsumer(TextureFrameConsumer consumer) {
thread.removeConsumer(consumer);
}
public void close() {
if (thread == null) {
return;
}
thread.getHandler().post(() -> thread.setSurfaceTexture(null, 0, 0));
thread.quitSafely();
try {
thread.join();
} catch (InterruptedException ie) {
// Set the interrupted flag again, log the error, and throw a RuntimeException.
Thread.currentThread().interrupt();
Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage());
throw new RuntimeException(ie);
}
}
private static class RenderThread extends GlThread
implements SurfaceTexture.OnFrameAvailableListener {
private static final long NANOS_PER_MICRO = 1000; // Nanoseconds in one microsecond.
private volatile SurfaceTexture surfaceTexture = null;
private final List<TextureFrameConsumer> consumers;
private List<AppTextureFrame> outputFrames = null;
private int outputFrameIndex = -1;
private ExternalTextureRenderer renderer = null;
private long timestampOffset = 0;
private long previousTimestamp = 0;
protected int destinationWidth = 0;
protected int destinationHeight = 0;
public RenderThread(EGLContext parentContext, int numBuffers) {
super(parentContext);
outputFrames = new ArrayList<>();
outputFrames.addAll(Collections.nCopies(numBuffers, null));
renderer = new ExternalTextureRenderer();
consumers = new ArrayList<>();
}
public void setFlipY(boolean flip) {
renderer.setFlipY(flip);
}
public void setSurfaceTexture(SurfaceTexture texture, int width, int height) {
if (surfaceTexture != null) {
surfaceTexture.setOnFrameAvailableListener(null);
}
surfaceTexture = texture;
if (surfaceTexture != null) {
surfaceTexture.setOnFrameAvailableListener(this);
}
destinationWidth = width;
destinationHeight = height;
}
public void setSurfaceTextureAndAttachToGLContext(
SurfaceTexture texture, int width, int height) {
setSurfaceTexture(texture, width, height);
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
surfaceTexture.attachToGLContext(textures[0]);
}
public void setConsumer(TextureFrameConsumer consumer) {
synchronized (consumers) {
consumers.clear();
consumers.add(consumer);
}
}
public void addConsumer(TextureFrameConsumer consumer) {
synchronized (consumers) {
consumers.add(consumer);
}
}
public void removeConsumer(TextureFrameConsumer consumer) {
synchronized (consumers) {
consumers.remove(consumer);
}
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
handler.post(() -> renderNext(surfaceTexture));
}
@Override
public void prepareGl() {
super.prepareGl();
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
renderer.setup();
}
@Override
public void releaseGl() {
for (int i = 0; i < outputFrames.size(); ++i) {
teardownDestination(i);
}
renderer.release();
super.releaseGl(); // This releases the EGL context, so must do it after any GL calls.
}
protected void renderNext(SurfaceTexture fromTexture) {
if (fromTexture != surfaceTexture) {
// Although the setSurfaceTexture and renderNext methods are correctly sequentialized on
// the same thread, the onFrameAvailable callback is not. Therefore, it is possible for
// onFrameAvailable to queue up a renderNext call while a setSurfaceTexture call is still
// pending on the handler. When that happens, we should simply disregard the call.
return;
}
synchronized (consumers) {
boolean frameUpdated = false;
for (TextureFrameConsumer consumer : consumers) {
AppTextureFrame outputFrame = nextOutputFrame();
// TODO: Switch to ref-counted single copy instead of making additional
// copies blitting to separate textures each time.
updateOutputFrame(outputFrame);
frameUpdated = true;
if (consumer != null) {
if (Log.isLoggable(TAG, Log.VERBOSE)) {
Log.v(
TAG,
String.format(
"Locking tex: %d width: %d height: %d",
outputFrame.getTextureName(),
outputFrame.getWidth(),
outputFrame.getHeight()));
}
outputFrame.setInUse();
consumer.onNewFrame(outputFrame);
}
}
if (!frameUpdated) { // Need to update the frame even if there are no consumers.
AppTextureFrame outputFrame = nextOutputFrame();
// TODO: Switch to ref-counted single copy instead of making additional
// copies blitting to separate textures each time.
updateOutputFrame(outputFrame);
}
}
}
private void teardownDestination(int index) {
if (outputFrames.get(index) != null) {
waitUntilReleased(outputFrames.get(index));
GLES20.glDeleteTextures(1, new int[] {outputFrames.get(index).getTextureName()}, 0);
outputFrames.set(index, null);
}
}
private void setupDestination(int index) {
teardownDestination(index);
int destinationTextureId = ShaderUtil.createRgbaTexture(destinationWidth, destinationHeight);
Log.d(
TAG,
String.format(
"Created output texture: %d width: %d height: %d",
destinationTextureId, destinationWidth, destinationHeight));
bindFramebuffer(destinationTextureId, destinationWidth, destinationHeight);
outputFrames.set(
index, new AppTextureFrame(destinationTextureId, destinationWidth, destinationHeight));
}
/**
* Gets next available frame or creates new one if next frame is not initialized
* or cannot be used with current surface texture.
*
* <ul>
* <li>Makes sure frame width and height are same as current surface texture</li>
* <li>Makes sure frame is not in use (blocks thread until frame is released)</li>
* </ul>
*
* NOTE: must be invoked on GL thread
*/
private AppTextureFrame nextOutputFrame() {
outputFrameIndex = (outputFrameIndex + 1) % outputFrames.size();
AppTextureFrame outputFrame = outputFrames.get(outputFrameIndex);
// Check if the size has changed.
if (outputFrame == null
|| outputFrame.getWidth() != destinationWidth
|| outputFrame.getHeight() != destinationHeight) {
// setupDestination will wait for the frame to be released before reallocating it.
setupDestination(outputFrameIndex);
outputFrame = outputFrames.get(outputFrameIndex);
}
waitUntilReleased(outputFrame);
return outputFrame;
}
/**
* Updates output frame with current pixels of surface texture and corresponding timestamp.
*
* @param outputFrame {@link AppTextureFrame} to populate.
*
* NOTE: must be invoked on GL thread
*/
private void updateOutputFrame(AppTextureFrame outputFrame) {
// Copy surface texture's pixels to output frame
bindFramebuffer(outputFrame.getTextureName(), destinationWidth, destinationHeight);
renderer.render(surfaceTexture);
// Populate frame timestamp with surface texture timestamp after render() as renderer
// ensures that surface texture has the up-to-date timestamp. (Also adjust |timestampOffset|
// to ensure that timestamps increase monotonically.)
long textureTimestamp = surfaceTexture.getTimestamp() / NANOS_PER_MICRO;
if (textureTimestamp + timestampOffset <= previousTimestamp) {
timestampOffset = previousTimestamp + 1 - textureTimestamp;
}
outputFrame.setTimestamp(textureTimestamp + timestampOffset);
previousTimestamp = outputFrame.getTimestamp();
}
private void waitUntilReleased(AppTextureFrame frame) {
try {
if (Log.isLoggable(TAG, Log.VERBOSE)) {
Log.v(
TAG,
String.format(
"Waiting for tex: %d width: %d height: %d",
frame.getTextureName(), frame.getWidth(), frame.getHeight()));
}
frame.waitUntilReleased();
if (Log.isLoggable(TAG, Log.VERBOSE)) {
Log.v(
TAG,
String.format(
"Finished waiting for tex: %d width: %d height: %d",
frame.getTextureName(), frame.getWidth(), frame.getHeight()));
}
} catch (InterruptedException ie) {
// Someone interrupted our thread. This is not supposed to happen: we own
// the thread, and we are not going to interrupt it. If it should somehow
// happen that the thread is interrupted, let's set the interrupted flag
// again, log the error, and throw a RuntimeException.
Thread.currentThread().interrupt();
Log.e(TAG, "thread was unexpectedly interrupted: " + ie.getMessage());
throw new RuntimeException(ie);
}
}
}
}
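Worked example of the timestamp adjustment in updateOutputFrame above: suppose previousTimestamp is 1000005 µs and the next surface-texture timestamp converts to 1000000 µs (texture timestamps need not advance, e.g. after a re-attachment). Then 1000000 + 0 <= 1000005 holds, so timestampOffset becomes 1000005 + 1 - 1000000 = 6 and the frame is stamped 1000006 µs, keeping the output stream strictly monotonic.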

FrameProcessor.java (package com.google.mediapipe.components)
@@ -0,0 +1,303 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.components;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import com.google.common.base.Preconditions;
import com.google.mediapipe.framework.AndroidAssetUtil;
import com.google.mediapipe.framework.AndroidPacketCreator;
import com.google.mediapipe.framework.Graph;
import com.google.mediapipe.framework.GraphService;
import com.google.mediapipe.framework.MediaPipeException;
import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.PacketCallback;
import com.google.mediapipe.framework.PacketGetter;
import com.google.mediapipe.framework.SurfaceOutput;
import com.google.mediapipe.framework.TextureFrame;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nullable;
/**
* A {@link com.google.mediapipe.components.TextureFrameProcessor} that sends video frames through a
* MediaPipe graph.
*/
public class FrameProcessor implements TextureFrameProcessor {
private static final String TAG = "FrameProcessor";
private List<TextureFrameConsumer> consumers = new ArrayList<>();
private Graph mediapipeGraph;
private AndroidPacketCreator packetCreator;
private OnWillAddFrameListener addFrameListener;
private String videoInputStream;
private String videoInputStreamCpu;
private String videoOutputStream;
private SurfaceOutput videoSurfaceOutput;
private final AtomicBoolean started = new AtomicBoolean(false);
private boolean hybridPath = false;
/**
* Constructor.
*
* @param context an Android {@link Context}.
* @param parentNativeContext a native handle to a GL context. The GL context(s) used by the
* calculators in the graph will join the parent context's sharegroup, so that textures
* generated by the calculators are available in the parent context, and vice versa.
* @param graphName the name of the file containing the binary representation of the graph.
* @param inputStream the graph input stream that will receive input video frames.
* @param outputStream the output stream from which output frames will be produced.
*/
public FrameProcessor(
Context context,
long parentNativeContext,
String graphName,
String inputStream,
String outputStream) {
mediapipeGraph = new Graph();
videoInputStream = inputStream;
videoOutputStream = outputStream;
try {
if (new File(graphName).isAbsolute()) {
mediapipeGraph.loadBinaryGraph(graphName);
} else {
mediapipeGraph.loadBinaryGraph(
AndroidAssetUtil.getAssetBytes(context.getAssets(), graphName));
}
packetCreator = new AndroidPacketCreator(mediapipeGraph);
mediapipeGraph.addPacketCallback(
videoOutputStream,
new PacketCallback() {
@Override
public void process(Packet packet) {
List<TextureFrameConsumer> currentConsumers;
// Synchronize on the enclosing FrameProcessor, which is what guards writes to
// consumers; a bare "this" here would lock the anonymous PacketCallback instead.
synchronized (FrameProcessor.this) {
currentConsumers = consumers;
}
for (TextureFrameConsumer consumer : currentConsumers) {
TextureFrame frame = PacketGetter.getTextureFrame(packet);
if (Log.isLoggable(TAG, Log.VERBOSE)) {
Log.v(
TAG,
String.format(
"Output tex: %d width: %d height: %d to consumer %h",
frame.getTextureName(), frame.getWidth(), frame.getHeight(), consumer));
}
consumer.onNewFrame(frame);
}
}
});
mediapipeGraph.setParentGlContext(parentNativeContext);
} catch (MediaPipeException e) {
Log.e(TAG, "Mediapipe error: ", e);
}
videoSurfaceOutput = mediapipeGraph.addSurfaceOutput(videoOutputStream);
}
/**
* Interface to be used so that this class can receive a callback when onNewFrame has determined
* it will process an input frame. Can be used to feed packets to accessory streams.
*/
public interface OnWillAddFrameListener {
void onWillAddFrame(long timestamp);
}
public synchronized <T> void setServiceObject(GraphService<T> service, T object) {
mediapipeGraph.setServiceObject(service, object);
}
public void setInputSidePackets(Map<String, Packet> inputSidePackets) {
Preconditions.checkState(
!started.get(), "setInputSidePackets must be called before the graph is started");
mediapipeGraph.setInputSidePackets(inputSidePackets);
}
@Override
public void setConsumer(TextureFrameConsumer listener) {
synchronized (this) {
consumers = Arrays.asList(listener);
}
}
public void setVideoInputStreamCpu(String inputStream) {
videoInputStreamCpu = inputStream;
}
public void setHybridPath() {
hybridPath = true;
}
public void addConsumer(TextureFrameConsumer listener) {
synchronized (this) {
List<TextureFrameConsumer> newConsumers = new ArrayList<>(consumers);
newConsumers.add(listener);
consumers = newConsumers;
}
}
public boolean removeConsumer(TextureFrameConsumer listener) {
boolean existed;
synchronized (this) {
List<TextureFrameConsumer> newConsumers = new ArrayList<>(consumers);
existed = newConsumers.remove(listener);
consumers = newConsumers;
}
return existed;
}
/** Gets the {@link Graph} used to run the graph. */
public Graph getGraph() {
return mediapipeGraph;
}
/** Gets the {@link PacketCreator} associated with the graph. */
public AndroidPacketCreator getPacketCreator() {
return packetCreator;
}
/** Gets the {@link SurfaceOutput} connected to the video output stream. */
public SurfaceOutput getVideoSurfaceOutput() {
return videoSurfaceOutput;
}
/** Closes and cleans up the graph. */
public void close() {
if (started.get()) {
try {
mediapipeGraph.closeAllPacketSources();
mediapipeGraph.waitUntilGraphDone();
} catch (MediaPipeException e) {
Log.e(TAG, "Mediapipe error: ", e);
}
try {
mediapipeGraph.tearDown();
} catch (MediaPipeException e) {
Log.e(TAG, "Mediapipe error: ", e);
}
}
}
/**
* Initializes the graph in advance of receiving frames.
*
* <p>Normally the graph is initialized when the first frame arrives. You can optionally call this
* method to initialize it ahead of time.
* @throws MediaPipeException for any error status.
*/
public void preheat() {
if (!started.getAndSet(true)) {
startGraph();
}
}
public void setOnWillAddFrameListener(@Nullable OnWillAddFrameListener addFrameListener) {
this.addFrameListener = addFrameListener;
}
/**
* Returns true if the MediaPipe graph can accept one more input frame.
* @throws MediaPipeException for any error status.
*/
private boolean maybeAcceptNewFrame() {
if (!started.getAndSet(true)) {
startGraph();
}
return true;
}
@Override
public void onNewFrame(final TextureFrame frame) {
if (Log.isLoggable(TAG, Log.VERBOSE)) {
Log.v(
TAG,
String.format(
"Input tex: %d width: %d height: %d",
frame.getTextureName(), frame.getWidth(), frame.getHeight()));
}
if (!maybeAcceptNewFrame()) {
frame.release();
return;
}
if (addFrameListener != null) {
addFrameListener.onWillAddFrame(frame.getTimestamp());
}
Packet imagePacket = packetCreator.createGpuBuffer(frame);
try {
// addConsumablePacketToInputStream allows the graph to take exclusive ownership of the
// packet, which may allow for more memory optimizations.
mediapipeGraph.addConsumablePacketToInputStream(
videoInputStream, imagePacket, frame.getTimestamp());
} catch (MediaPipeException e) {
Log.e(TAG, "Mediapipe error: ", e);
}
imagePacket.release();
}
/**
* Accepts a Bitmap to be sent to main input stream at the given timestamp.
*
* <p>Note: This requires a graph that takes an ImageFrame instead of a mediapipe::GpuBuffer. An
* instance of FrameProcessor should only ever use this or the other variant for onNewFrame().
*/
public void onNewFrame(final Bitmap bitmap, long timestamp) {
if (!maybeAcceptNewFrame()) {
return;
}
if (!hybridPath && addFrameListener != null) {
addFrameListener.onWillAddFrame(timestamp);
}
Packet packet = getPacketCreator().createRgbImageFrame(bitmap);
try {
// addConsumablePacketToInputStream allows the graph to take exclusive ownership of the
// packet, which may allow for more memory optimizations.
mediapipeGraph.addConsumablePacketToInputStream(videoInputStreamCpu, packet, timestamp);
} catch (MediaPipeException e) {
Log.e(TAG, "Mediapipe error: ", e);
}
packet.release();
}
public void waitUntilIdle() {
try {
mediapipeGraph.waitUntilGraphIdle();
} catch (MediaPipeException e) {
Log.e(TAG, "Mediapipe error: ", e);
}
}
/**
* Starts running the MediaPipe graph.
* @throws MediaPipeException for any error status.
*/
private void startGraph() {
mediapipeGraph.startRunningGraph();
}
}
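A hedged usage sketch of the Bitmap/CPU entry point above; the stream name "input_video_cpu" is an assumption and must match an ImageFrame input stream declared in whatever binary graph was loaded:
import android.graphics.Bitmap;
import com.google.mediapipe.components.FrameProcessor;

final class CpuPathExample {
  // All calls below exist in FrameProcessor as shown; only the stream name is assumed.
  static void sendBitmap(FrameProcessor processor, Bitmap bitmap, long timestampMicros) {
    processor.setVideoInputStreamCpu("input_video_cpu"); // assumed stream name
    processor.preheat(); // optional: start the graph before the first frame arrives
    processor.onNewFrame(bitmap, timestampMicros); // CPU variant of onNewFrame
  }
}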

MicrophoneHelper.java (package com.google.mediapipe.components)
@@ -0,0 +1,295 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.components;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.MediaRecorder.AudioSource;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.util.Log;
import javax.annotation.Nullable;
/** Provides access to audio data from a microphone. */
public class MicrophoneHelper {
/** The listener is called when audio data from the microphone is available. */
public interface OnAudioDataAvailableListener {
public void onAudioDataAvailable(byte[] audioData, long timestampMicros);
}
private static final String TAG = "MicrophoneHelper";
private static final int AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int AUDIO_SOURCE = AudioSource.MIC;
// A small constant valued multiplier for setting bufferSize. This is useful
// to reduce buffer overflows when a lot of data needs to be read at a high
// sample rate from the audio stream. Note that it is desirable to keep this
// multiplier small, because very large buffer sizes can slow down blocking
// calls to AudioRecord.read(...) when the sample rate is low for instance.
private static final int BUFFER_SIZE_MULTIPLIER = 2;
// A small constant value to decide the number of seconds of audio data that
// will be read in a single AudioRecord.read(...) call when
// AudioRecord.minBufferSize(...) is unavailable. Smaller values for this
// constant favor faster blocking calls to AudioRecord.read(...).
private static final int MAX_READ_INTERVAL_SEC = 1;
// This class uses AudioFormat.ENCODING_PCM_16BIT, i.e. 16 bits per single channel sample.
private static final int BYTES_PER_MONO_SAMPLE = 2;
private static final long UNINITIALIZED_TIMESTAMP = -1;
private static final long NANOS_PER_MICROS = 1000;
private static final long MICROS_PER_SECOND = 1000000;
// Number of audio samples recorded per second.
private final int sampleRateInHz;
// Channel configuration of audio source, one of AudioRecord.CHANNEL_IN_MONO or
// AudioRecord.CHANNEL_IN_STEREO.
private final int channelConfig;
// Data storage allocated to record audio samples in a single function call to AudioRecord.read().
private final int bufferSize;
// Bytes used per sample, accounts for number of channels of audio source. Possible values are 2
// bytes for a 1-channel sample and 4 bytes for a 2-channel sample.
private final int bytesPerSample;
private byte[] audioData;
// Timestamp provided by the AudioTimestamp object.
private AudioTimestamp audioTimestamp;
// Initial timestamp base. Can be set by the client so that all timestamps calculated using the
// number of samples read per AudioRecord.read() function call start from this timestamp.
private long initialTimestamp = UNINITIALIZED_TIMESTAMP;
// The total number of samples read from multiple calls to AudioRecord.read(). This is reset to
// zero for every startMicrophone() call.
private long totalNumSamplesRead;
// AudioRecord is used to set up a way to record data from the audio source. See
// https://developer.android.com/reference/android/media/AudioRecord.html for details.
private AudioRecord audioRecord;
// Data is read on a separate non-blocking thread.
private Thread recordingThread;
// This flag determines if audio will be read from the audio source and if the data read will be
// sent to the listener of this class.
private boolean recording = false;
// This listener is provided with the data read on every AudioRecord.read() call. If the listener
// called stopMicrophone() while a call to AudioRecord.read() was blocked, the class will discard
// the data read after recording stopped.
private OnAudioDataAvailableListener onAudioDataAvailableListener;
/**
* MicrophoneHelper class constructor.
*
* @param sampleRateInHz Number of samples per second to be read from audio stream.
* @param channelConfig Configuration of audio channels. See
* https://developer.android.com/reference/android/media/AudioRecord.html#public-constructors_1.
*/
public MicrophoneHelper(int sampleRateInHz, int channelConfig) {
this.sampleRateInHz = sampleRateInHz;
this.channelConfig = channelConfig;
// Number of channels of audio source, depending on channelConfig.
final int channelCount = channelConfig == AudioFormat.CHANNEL_IN_STEREO ? 2 : 1;
bytesPerSample = BYTES_PER_MONO_SAMPLE * channelCount;
// The minimum buffer size required by AudioRecord.
final int minBufferSize =
AudioRecord.getMinBufferSize(
sampleRateInHz, channelConfig, /*audioFormat=*/ AUDIO_ENCODING);
// Set bufferSize. If the minimum buffer size permitted by the hardware is
// unavailable, use the sampleRateInHz value as the number of bytes.
// This is arguably better than another arbitrary constant because a higher
// value of sampleRateInHz implies the need for reading large chunks of data
// from the audio stream in each AudioRecord.read(...) call.
if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
Log.e(TAG, "AudioRecord minBufferSize unavailable.");
bufferSize = sampleRateInHz * MAX_READ_INTERVAL_SEC * bytesPerSample * BUFFER_SIZE_MULTIPLIER;
} else {
bufferSize = minBufferSize * BUFFER_SIZE_MULTIPLIER;
}
}
private void setupAudioRecord() {
audioData = new byte[bufferSize];
Log.d(TAG, "AudioRecord(" + sampleRateInHz + ", " + bufferSize + ")");
audioRecord =
new AudioRecord.Builder()
.setAudioSource(AUDIO_SOURCE)
.setAudioFormat(
new AudioFormat.Builder()
.setEncoding(AUDIO_ENCODING)
.setSampleRate(sampleRateInHz)
.setChannelMask(channelConfig)
.build())
.setBufferSizeInBytes(bufferSize)
.build();
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
audioRecord.release();
Log.e(TAG, "AudioRecord could not open.");
return;
}
recordingThread =
new Thread(
() -> {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
Log.v(TAG, "Running audio recording thread.");
// Initial timestamp in case the AudioRecord.getTimestamp() function is unavailable.
long startTimestamp = initialTimestamp != UNINITIALIZED_TIMESTAMP
? initialTimestamp
: System.nanoTime() / NANOS_PER_MICROS;
long sampleBasedTimestamp;
while (recording) {
if (audioRecord == null) {
break;
}
final int numBytesRead =
audioRecord.read(audioData, /*offsetInBytes=*/ 0, /*sizeInBytes=*/ bufferSize);
// If AudioRecord.getTimestamp() is unavailable, calculate the timestamp using the
// number of samples read in the call to AudioRecord.read().
long sampleBasedFallbackTimestamp =
startTimestamp + totalNumSamplesRead * MICROS_PER_SECOND / sampleRateInHz;
sampleBasedTimestamp =
getTimestamp(/*fallbackTimestamp=*/sampleBasedFallbackTimestamp);
if (numBytesRead <= 0) {
if (numBytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
Log.e(TAG, "ERROR_INVALID_OPERATION");
} else if (numBytesRead == AudioRecord.ERROR_BAD_VALUE) {
Log.e(TAG, "ERROR_BAD_VALUE");
}
continue;
}
Log.v(TAG, "Read " + numBytesRead + " bytes of audio data.");
// Confirm that the listener is still interested in receiving audio data and
// stopMicrophone() wasn't called. If the listener called stopMicrophone(), discard
// the data read in the latest AudioRecord.read(...) function call.
if (recording) {
onAudioDataAvailableListener.onAudioDataAvailable(
audioData.clone(), sampleBasedTimestamp);
}
// TODO: Replace byte[] with short[] audioData.
// It is expected that audioRecord.read() will read full samples and therefore
// numBytesRead is expected to be a multiple of bytesPerSample.
int numSamplesRead = numBytesRead / bytesPerSample;
totalNumSamplesRead += numSamplesRead;
}
});
}
// Returns the timestamp from AudioRecord.getTimestamp() if it is available and returns without
// error; otherwise returns the fallbackTimestamp provided as an argument to this method.
private long getTimestamp(long fallbackTimestamp) {
// AudioRecord.getTimestamp is only available at API Level 24 and above.
// https://developer.android.com/reference/android/media/AudioRecord.html#getTimestamp(android.media.AudioTimestamp,%20int).
if (VERSION.SDK_INT >= VERSION_CODES.N) {
if (audioTimestamp == null) {
audioTimestamp = new AudioTimestamp();
}
int status = audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
if (status == AudioRecord.SUCCESS) {
return audioTimestamp.nanoTime / NANOS_PER_MICROS;
} else {
Log.e(TAG, "audioRecord.getTimestamp failed with status: " + status);
}
}
return fallbackTimestamp;
}
// Returns the size of the buffer this class reads per AudioRecord.read() call.
public int getBufferSize() {
return bufferSize;
}
/**
* Overrides the use of system time as the source of timestamps for audio packets. Not
* recommended. Provided to maintain compatibility with existing usage by CameraRecorder.
*/
public void setInitialTimestamp(long initialTimestamp) {
this.initialTimestamp = initialTimestamp;
}
// This method sets up a new AudioRecord object for reading audio data from the microphone. It
// can be called multiple times to restart the recording if necessary.
public void startMicrophone() {
if (recording) {
return;
}
setupAudioRecord();
audioRecord.startRecording();
if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
Log.e(TAG, "AudioRecord couldn't start recording.");
audioRecord.release();
return;
}
recording = true;
totalNumSamplesRead = 0;
recordingThread.start();
Log.d(TAG, "AudioRecord is recording audio.");
}
// Stops the AudioRecord object from reading data from the microphone and releases it.
public void stopMicrophone() {
stopMicrophoneWithoutCleanup();
cleanup();
Log.d(TAG, "AudioRecord stopped recording audio.");
}
// Stops the AudioRecord object from reading data from the microphone.
public void stopMicrophoneWithoutCleanup() {
if (!recording) {
return;
}
recording = false;
try {
if (recordingThread != null) {
recordingThread.join();
}
} catch (InterruptedException ie) {
Log.e(TAG, "Exception: ", ie);
}
audioRecord.stop();
if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
Log.e(TAG, "AudioRecord.stop() didn't run properly.");
}
}
// Releases the AudioRecord object when there is no ongoing recording.
public void cleanup() {
if (recording) {
return;
}
audioRecord.release();
}
public void setOnAudioDataAvailableListener(@Nullable OnAudioDataAvailableListener listener) {
onAudioDataAvailableListener = listener;
}
}
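A minimal usage sketch of the lifecycle above, assuming RECORD_AUDIO permission has already been granted; the 16 kHz mono configuration and log tag are illustrative:

// Sketch only: record mono 16 kHz audio and log each chunk's timestamp.
MicrophoneHelper microphoneHelper =
    new MicrophoneHelper(/* sampleRateInHz= */ 16000, AudioFormat.CHANNEL_IN_MONO);
microphoneHelper.setOnAudioDataAvailableListener(
    (audioData, timestampMicros) ->
        Log.v("MicDemo", "Read " + audioData.length + " bytes at " + timestampMicros + "us"));
microphoneHelper.startMicrophone();
// ... later, e.g. from Activity.onPause():
microphoneHelper.stopMicrophone();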

View File

@ -0,0 +1,93 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.components;
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import android.util.Log;
/** Manages camera and audio permission requests and handling. */
public class PermissionHelper {
private static final String TAG = "PermissionHelper";
private static final String AUDIO_PERMISSION = Manifest.permission.RECORD_AUDIO;
private static final String CAMERA_PERMISSION = Manifest.permission.CAMERA;
private static final int REQUEST_CODE = 0;
public static boolean permissionsGranted(Activity context, String[] permissions) {
for (String permission : permissions) {
int permissionStatus = ContextCompat.checkSelfPermission(context, permission);
if (permissionStatus != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
public static void checkAndRequestPermissions(Activity context, String[] permissions) {
if (!permissionsGranted(context, permissions)) {
ActivityCompat.requestPermissions(context, permissions, REQUEST_CODE);
}
}
/** Called by context to check if camera permissions have been granted. */
public static boolean cameraPermissionsGranted(Activity context) {
return permissionsGranted(context, new String[] {CAMERA_PERMISSION});
}
/**
* Called by context to check if camera permissions have been granted and if not, request them.
*/
public static void checkAndRequestCameraPermissions(Activity context) {
Log.d(TAG, "checkAndRequestCameraPermissions");
checkAndRequestPermissions(context, new String[] {CAMERA_PERMISSION});
}
/** Called by context to check if audio permissions have been granted. */
public static boolean audioPermissionsGranted(Activity context) {
return permissionsGranted(context, new String[] {AUDIO_PERMISSION});
}
/** Called by context to check if audio permissions have been granted and if not, request them. */
public static void checkAndRequestAudioPermissions(Activity context) {
Log.d(TAG, "checkAndRequestAudioPermissions");
checkAndRequestPermissions(context, new String[] {AUDIO_PERMISSION});
}
/** Called by context when permissions request has been completed. */
public static void onRequestPermissionsResult(
int requestCode, String[] permissions, int[] grantResults) {
Log.d(TAG, "onRequestPermissionsResult");
if (permissions.length > 0 && grantResults.length != permissions.length) {
Log.d(TAG, "Permission denied.");
return;
}
for (int i = 0; i < grantResults.length; ++i) {
if (grantResults[i] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, permissions[i] + " permission granted.");
}
}
// Note: We don't need any special callbacks when permissions are ready because activities
// using this helper class can have code in onResume() which is called after the
// permissions dialog box closes. The code can be branched depending on if permissions are
// available via permissionsGranted(Activity).
return;
}
}
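A hedged sketch of the intended Activity wiring, following the onResume() pattern described in the comment above; startCamera() is a hypothetical app-side method:

// Sketch only: typical integration inside an Activity subclass.
@Override
protected void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  PermissionHelper.checkAndRequestCameraPermissions(this);
}

@Override
public void onRequestPermissionsResult(
    int requestCode, String[] permissions, int[] grantResults) {
  super.onRequestPermissionsResult(requestCode, permissions, grantResults);
  PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults);
}

@Override
protected void onResume() {
  super.onResume();
  if (PermissionHelper.cameraPermissionsGranted(this)) {
    startCamera();  // hypothetical app-side method
  }
}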

View File

@ -0,0 +1,23 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.components;
import com.google.mediapipe.framework.TextureFrame;
/** Lightweight abstraction for an object that can receive video frames. */
public interface TextureFrameConsumer {
/** Called when a new {@link TextureFrame} is available. */
public abstract void onNewFrame(TextureFrame frame);
}

View File

@ -0,0 +1,21 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.components;
/**
* Lightweight abstraction for an object that can receive video frames, process them, and pass them
* on to another object.
*/
public interface TextureFrameProcessor extends TextureFrameProducer, TextureFrameConsumer {}

View File

@ -0,0 +1,21 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.components;
/** Lightweight abstraction for an object that can produce video frames. */
public interface TextureFrameProducer {
/** Set the consumer that receives the output from this producer. */
void setConsumer(TextureFrameConsumer next);
}
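Taken together, the three interfaces above form a small pipeline contract. A hedged wiring sketch follows; the concrete producer and processor classes are assumptions, and only setConsumer() and onNewFrame() come from the interfaces themselves:

// Sketch only: chain producer -> processor -> terminal consumer.
TextureFrameProducer cameraSource = /* e.g. a camera texture converter */ null;
TextureFrameProcessor graphProcessor = /* e.g. a graph-backed processor */ null;
cameraSource.setConsumer(graphProcessor);
graphProcessor.setConsumer(
    frame -> {
      // Always release frames so the producer can recycle the texture.
      frame.release();
    });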

View File

@ -0,0 +1,60 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import android.content.Context;
import android.content.res.AssetManager;
import com.google.common.io.ByteStreams;
import java.io.IOException;
import java.io.InputStream;
/**
* Helper methods for handling Android assets.
*/
public final class AndroidAssetUtil {
/**
* Returns an asset's contents as a byte array. This is meant to be used in combination with
* {@link Graph#loadBinaryGraph}.
*
* @param assetName The name of an asset, same as in {@link AssetManager#open(String)}.
*/
public static byte[] getAssetBytes(AssetManager assets, String assetName) {
byte[] assetData;
try {
InputStream stream = assets.open(assetName);
assetData = ByteStreams.toByteArray(stream);
stream.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
return assetData;
}
/**
* Initializes the native asset manager, which is used by native code to access assets directly.
*
* <p>Note: When possible, using {@link AssetCache} is preferred for portability, since it does
* not require any special handling for Android assets on the native code side.
*/
public static boolean initializeNativeAssetManager(Context androidContext) {
return nativeInitializeAssetManager(
androidContext, androidContext.getCacheDir().getAbsolutePath());
}
private static native boolean nativeInitializeAssetManager(
Context androidContext, String cacheDirPath);
private AndroidAssetUtil() {}
}
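A minimal sketch combining getAssetBytes with Graph#loadBinaryGraph; the asset name "mobile_gpu.binarypb" is illustrative:

// Sketch only: load a serialized graph from the APK's assets.
byte[] graphBytes =
    AndroidAssetUtil.getAssetBytes(context.getAssets(), "mobile_gpu.binarypb");
Graph graph = new Graph();
graph.loadBinaryGraph(graphBytes);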

View File

@ -0,0 +1,60 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import android.graphics.Bitmap;
// TODO: use Preconditions in this file.
/**
* Android-specific subclass of PacketCreator.
*
* <p>See {@link PacketCreator} for general information.
*
* <p>This class contains methods that are Android-specific. You can (and should) use the base
* PacketCreator on Android if you do not need any methods from this class.
*/
public class AndroidPacketCreator extends PacketCreator {
public AndroidPacketCreator(Graph context) {
super(context);
}
/** Creates a 3 channel RGB ImageFrame packet from a {@link Bitmap}. */
public Packet createRgbImageFrame(Bitmap bitmap) {
if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) {
throw new RuntimeException("bitmap must use ARGB_8888 config.");
}
return Packet.create(nativeCreateRgbImageFrame(mediapipeGraph.getNativeHandle(), bitmap));
}
/** Creates a 4 channel RGBA ImageFrame packet from a {@link Bitmap}. */
public Packet createRgbaImageFrame(Bitmap bitmap) {
if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) {
throw new RuntimeException("bitmap must use ARGB_8888 config.");
}
return Packet.create(nativeCreateRgbaImageFrame(mediapipeGraph.getNativeHandle(), bitmap));
}
/**
* Returns the native handle of a new internal::PacketWithContext object on success. Returns 0 on
* failure.
*/
private native long nativeCreateRgbImageFrame(long context, Bitmap bitmap);
/**
* Returns the native handle of a new internal::PacketWithContext object on success. Returns 0 on
* failure.
*/
private native long nativeCreateRgbaImageFrame(long context, Bitmap bitmap);
}
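A minimal sketch of the ARGB_8888 requirement above; the source bitmap and its origin are illustrative:

// Sketch only: bitmaps must use ARGB_8888, or the creator throws.
Bitmap argb8888 = sourceBitmap.copy(Bitmap.Config.ARGB_8888, /* isMutable= */ false);
AndroidPacketCreator creator = new AndroidPacketCreator(graph);
Packet framePacket = creator.createRgbaImageFrame(argb8888);
// ... feed framePacket into the graph, then release the Java handle:
framePacket.release();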

View File

@ -0,0 +1,69 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import android.graphics.Bitmap;
import com.google.common.flogger.FluentLogger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Android-specific subclass of PacketGetter.
*
* <p>See {@link PacketGetter} for general information.
*
* <p>This class contains methods that are Android-specific.
*/
public final class AndroidPacketGetter {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
/** Gets an {@code ARGB_8888} bitmap from an RGB mediapipe image frame packet. */
public static Bitmap getBitmapFromRgb(Packet packet) {
int width = PacketGetter.getImageWidth(packet);
int height = PacketGetter.getImageHeight(packet);
ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4);
PacketGetter.getRgbaFromRgb(packet, buffer);
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
return bitmap;
}
/**
* Gets an {@code ARGB_8888} bitmap from an RGBA mediapipe image frame packet. Returns null in
* case of failure.
*/
public static Bitmap getBitmapFromRgba(Packet packet) {
// TODO: unify into a single getBitmap call.
// TODO: use NDK Bitmap access instead of copyPixelsToBuffer.
int width = PacketGetter.getImageWidth(packet);
int height = PacketGetter.getImageHeight(packet);
ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4);
buffer.order(ByteOrder.nativeOrder());
// Note: even though the Android Bitmap config is named ARGB_8888, the data
// is stored as RGBA internally.
boolean status = PacketGetter.getImageData(packet, buffer);
if (!status) {
logger.atSevere().log(
"Got error from getImageData, returning null Bitmap. Image width %d, height %d",
width, height);
return null;
}
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
return bitmap;
}
private AndroidPacketGetter() {}
}
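A hedged sketch of converting an output packet back to a Bitmap inside a stream callback; "output_video" is an illustrative stream name, and this assumes PacketCallback is a single-method interface (as its usage elsewhere in this commit suggests):

// Sketch only: convert RGBA image packets from an output stream into Bitmaps.
graph.addPacketCallback(
    "output_video",
    packet -> {
      Bitmap bitmap = AndroidPacketGetter.getBitmapFromRgba(packet);
      if (bitmap != null) {
        // Hand the bitmap off to the UI thread, an encoder, etc.
      }
    });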

View File

@ -0,0 +1,157 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/**
* A {@link TextureFrame} that represents a texture produced by the application.
*
* <p>The {@link #waitUntilReleased()} method can be used to wait for the consumer to be done with
* the texture before destroying or overwriting it.
*
* <p>With this class, your application is the producer. The consumer can be MediaPipe (if you send
* the frame into a MediaPipe graph using {@link PacketCreator#createGpuBuffer(TextureFrame)}) or
* your application (if you just hand it to another part of your application without going through
* MediaPipe).
*/
public class AppTextureFrame implements TextureFrame {
private int textureName;
private int width;
private int height;
private long timestamp = Long.MIN_VALUE;
private boolean inUse = false;
private boolean legacyInUse = false; // This ignores GL context sync.
private GlSyncToken releaseSyncToken = null;
public AppTextureFrame(int textureName, int width, int height) {
this.textureName = textureName;
this.width = width;
this.height = height;
}
public void setTimestamp(long timestamp) {
this.timestamp = timestamp;
}
@Override
public int getTextureName() {
return textureName;
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public long getTimestamp() {
return timestamp;
}
/**
* Waits until the consumer is done with the texture.
* @throws InterruptedException
*/
public void waitUntilReleased() throws InterruptedException {
synchronized (this) {
while (inUse && releaseSyncToken == null) {
wait();
}
if (releaseSyncToken != null) {
releaseSyncToken.waitOnCpu();
releaseSyncToken.release();
inUse = false;
releaseSyncToken = null;
}
}
}
/**
* Returns whether the texture is currently in use.
*
* @deprecated this ignores cross-context sync. You should use {@link waitUntilReleased} instead,
* because cross-context sync cannot be supported efficiently using this API.
*/
@Deprecated
public boolean getInUse() {
synchronized (this) {
return legacyInUse;
}
}
/**
* Marks the texture as currently in use.
* <p>The producer calls this before handing the texture off to the consumer.
*/
public void setInUse() {
synchronized (this) {
if (releaseSyncToken != null) {
releaseSyncToken.release();
releaseSyncToken = null;
}
inUse = true;
legacyInUse = true;
}
}
/**
* Marks the texture as no longer in use.
* <p>The consumer calls this when it is done using the texture.
*/
@Override
public void release() {
synchronized (this) {
inUse = false;
legacyInUse = false;
notifyAll();
}
}
/**
* Called by MediaPipe when the texture has been released.
*
* <p>The sync token can be used to ensure that the GPU is done reading from the texture.
*/
@Override
public void release(GlSyncToken syncToken) {
synchronized (this) {
if (releaseSyncToken != null) {
releaseSyncToken.release();
releaseSyncToken = null;
}
releaseSyncToken = syncToken;
// Note: we deliberately do not set inUse to false here. Clients should call
// waitUntilReleased. See deprecation notice on getInUse.
legacyInUse = false;
notifyAll();
}
}
@Override
public void finalize() {
// Note: we do not normally want to rely on finalize to dispose of native objects. In this
// case, however, the object is normally disposed of in the wait method; the finalize method
// serves as a fallback in case the application simply drops the object. The token object is
// small, so even if its destruction is delayed, it's not a huge problem.
if (releaseSyncToken != null) {
releaseSyncToken.release();
releaseSyncToken = null;
}
}
}
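A minimal producer-side sketch of the lifecycle described above; textureName, width, height, and timestampUs refer to a texture the app has already rendered, and "consumer" is any TextureFrameConsumer:

// Sketch only: hand an app-rendered texture to a consumer, then wait for it.
AppTextureFrame frame = new AppTextureFrame(textureName, width, height);
frame.setTimestamp(timestampUs);
frame.setInUse();            // mark busy before handing off
consumer.onNewFrame(frame);  // e.g. into a MediaPipe graph
try {
  frame.waitUntilReleased(); // blocks until the consumer (and GPU) are done
} catch (InterruptedException e) {
  Thread.currentThread().interrupt();
}
// The texture can now be safely overwritten or deleted.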

View File

@ -0,0 +1,206 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import android.content.Context;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.AssetManager;
import androidx.annotation.VisibleForTesting;
import android.text.TextUtils;
import com.google.common.base.Preconditions;
import com.google.common.flogger.FluentLogger;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.annotation.Nullable;
/**
 * A singleton class to help access assets as normal files from native code.
*
* <p>This class extracts Android assets as files in a cache directory so that they can be accessed
* by code that expects a regular file path.
*
* <p>The cache is automatically purged when the versionCode in the app's manifest changes, to avoid
* using stale assets. If a versionCode is not specified, the cache is disabled.
*/
public class AssetCache {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
@VisibleForTesting static final String MEDIAPIPE_ASSET_CACHE_DIR = "mediapipe_asset_cache";
private static AssetCache assetCache;
private int appVersionCode;
private AssetCacheDbHelper versionDatabase;
private Context context;
/**
* Create {@link AssetCache} with an Android context.
*
* <p>Asset manager needs context to access the asset files. {@link Create} can be called in the
* main activity.
*/
public static synchronized AssetCache create(Context context) {
Preconditions.checkNotNull(context);
if (assetCache == null) {
assetCache = new AssetCache(context);
}
return assetCache;
}
/**
* Purge the cached assets.
*
* <p>This should only be needed in local dev builds that do not update the versionCode in the
* app's manifest.
*/
public static synchronized void purgeCache(Context context) {
AssetCacheDbHelper dbHelper = new AssetCacheDbHelper(context);
dbHelper.invalidateCache(-1);
dbHelper.close();
}
/**
* Get {@link AssetCache} without context.
*
* <p>If not created, {@code null} is returned.
*/
@Nullable
public static synchronized AssetCache getAssetCache() {
return assetCache;
}
/**
* Loads all the assets in a given assets path.
* @param assetsPath the assets path from which to load.
*/
public synchronized void loadAllAssets(String assetsPath) {
Preconditions.checkNotNull(assetsPath);
AssetManager assetManager = context.getAssets();
String[] assetFiles = null;
try {
assetFiles = assetManager.list(assetsPath);
} catch (IOException e) {
logger.atSevere().withCause(e).log("Unable to get files in assets path: %s", assetsPath);
}
if (assetFiles == null || assetFiles.length == 0) {
logger.atWarning().log("No files to load");
return;
}
for (String file : assetFiles) {
// If a path was specified, prepend it to the filename with "/", otherwise, just
// use the file name.
String path = TextUtils.isEmpty(assetsPath) ? file : assetsPath + "/" + file;
getAbsolutePathFromAsset(path);
}
}
/**
* Get the absolute path for an asset file.
*
* <p>The asset will be unpacked to the application's files directory if not already done.
*
* @param assetPath path to a file under asset.
* @return the absolute file system path to the unpacked asset file.
*/
public synchronized String getAbsolutePathFromAsset(String assetPath) {
AssetManager assetManager = context.getAssets();
File destinationDir = getDefaultMediaPipeCacheDir();
destinationDir.mkdir();
File assetFile = new File(assetPath);
String assetName = assetFile.getName();
File destinationFile = new File(destinationDir.getPath(), assetName);
// If app version code is not defined, we don't use cache.
if (destinationFile.exists() && appVersionCode != 0
&& versionDatabase.checkVersion(assetPath, appVersionCode)) {
return destinationFile.getAbsolutePath();
}
InputStream inStream = null;
try {
inStream = assetManager.open(assetPath);
writeStreamToFile(inStream, destinationFile);
} catch (IOException ioe) {
logger.atSevere().log("Unable to unpack: %s", assetPath);
try {
if (inStream != null) {
inStream.close();
}
} catch (IOException ioe2) {
return null;
}
return null;
}
// If app version code is not defined, we don't use cache.
if (appVersionCode != 0) {
versionDatabase.insertAsset(assetPath, destinationFile.getAbsolutePath(), appVersionCode);
}
return destinationFile.getAbsolutePath();
}
/**
* Return all the file names of the assets that were saved to cache from the application's
* resources.
*/
public synchronized String[] getAvailableAssets() {
File assetsDir = getDefaultMediaPipeCacheDir();
if (assetsDir.exists()) {
return assetsDir.list();
}
return new String[0];
}
/**
* Returns the default cache directory used by the AssetCache to store the assets.
*/
public File getDefaultMediaPipeCacheDir() {
return new File(context.getCacheDir(), MEDIAPIPE_ASSET_CACHE_DIR);
}
private AssetCache(Context context) {
this.context = context;
versionDatabase = new AssetCacheDbHelper(context);
try {
appVersionCode = context.getPackageManager()
.getPackageInfo(context.getPackageName(), 0).versionCode;
logger.atInfo().log("Current app version code: %d", appVersionCode);
} catch (NameNotFoundException e) {
throw new RuntimeException("Can't get app version code.", e);
}
// Remove all the cached items that don't agree with the current app version.
versionDatabase.invalidateCache(appVersionCode);
}
private static void writeStreamToFile(InputStream inStream, File destinationFile)
throws IOException {
final int bufferSize = 1000;
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(destinationFile);
byte[] buffer = new byte[bufferSize];
while (true) {
int n = inStream.read(buffer);
if (n == -1) {
break;
}
outStream.write(buffer, 0, n);
}
} finally {
if (outStream != null) {
outStream.close();
}
}
}
}
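A minimal usage sketch tying the cache to Graph#loadBinaryGraph(String); the asset name is illustrative:

// Sketch only: unpack an asset to a real file path (cached per app version).
AssetCache assetCache = AssetCache.create(context);
String graphPath = assetCache.getAbsolutePathFromAsset("mobile_gpu.binarypb");
if (graphPath != null) {
  graph.loadBinaryGraph(graphPath);  // Graph accepts absolute file paths
}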

View File

@ -0,0 +1,175 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.provider.BaseColumns;
import com.google.common.flogger.FluentLogger;
import java.io.File;
/**
 * Database to keep the cached versions of assets valid.
*/
public class AssetCacheDbHelper extends SQLiteOpenHelper {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
public static final int DATABASE_VERSION = 2;
public static final String DATABASE_NAME = "mediapipe.db";
private static final String INT_TYPE = " INTEGER";
private static final String TEXT_TYPE = " TEXT";
private static final String TEXT_UNIQUE_TYPE = " TEXT NOT NULL UNIQUE";
private static final String COMMA_SEP = ",";
private static final String SQL_CREATE_TABLE =
"CREATE TABLE " + AssetCacheEntry.TABLE_NAME + " ("
+ AssetCacheEntry._ID + " INTEGER PRIMARY KEY,"
+ AssetCacheEntry.COLUMN_NAME_ASSET + TEXT_UNIQUE_TYPE + COMMA_SEP
+ AssetCacheEntry.COLUMN_NAME_CACHE_PATH + TEXT_TYPE + COMMA_SEP
+ AssetCacheEntry.COLUMN_NAME_VERSION + INT_TYPE + " )";
private static final String SQL_DELETE_TABLE =
"DROP TABLE IF EXISTS " + AssetCacheEntry.TABLE_NAME;
/**
* The columns in the AssetVersion table.
*/
public abstract static class AssetCacheEntry implements BaseColumns {
public static final String TABLE_NAME = "AssetVersion";
public static final String COLUMN_NAME_ASSET = "asset";
public static final String COLUMN_NAME_CACHE_PATH = "cache_path";
public static final String COLUMN_NAME_VERSION = "version";
}
public AssetCacheDbHelper(Context context) {
super(context, DATABASE_NAME, null, DATABASE_VERSION);
}
/**
* Check if the cached version is current in database.
*
* @return true if the asset is cached and the app is not upgraded. Otherwise return false.
*/
public boolean checkVersion(String assetPath, int currentAppVersion) {
SQLiteDatabase db = getReadableDatabase();
String selection = AssetCacheEntry.COLUMN_NAME_ASSET + " = ?";
String[] projection = {AssetCacheEntry.COLUMN_NAME_VERSION};
String[] selectionArgs = {assetPath};
Cursor cursor = queryAssetCacheTable(db, projection, selection, selectionArgs);
if (cursor.getCount() == 0) {
return false;
}
cursor.moveToFirst();
int cachedVersion = cursor.getInt(
cursor.getColumnIndexOrThrow(AssetCacheEntry.COLUMN_NAME_VERSION));
cursor.close();
return cachedVersion == currentAppVersion;
}
/**
* Remove all entries in the version table that don't have the correct version.
*
* <p>Invalidates all cached asset contents that doesn't have the specified version.
*/
public void invalidateCache(int currentAppVersion) {
SQLiteDatabase db = getWritableDatabase();
String selection = AssetCacheEntry.COLUMN_NAME_VERSION + " != ?";
String[] selectionArgs = {Integer.toString(currentAppVersion)};
// Remove the cached files.
removeCachedFiles(db, selection, selectionArgs);
// Remove the rows in the table.
db.delete(AssetCacheEntry.TABLE_NAME, selection, selectionArgs);
}
/**
* Insert the cached version of the asset into the database.
*/
public void insertAsset(String asset, String cachePath, int appVersion) {
SQLiteDatabase db = getWritableDatabase();
// Remove the old cached file first if they are different from the new cachePath.
String selection = AssetCacheEntry.COLUMN_NAME_ASSET + " = ? and "
+ AssetCacheEntry.COLUMN_NAME_CACHE_PATH + " != ?";
String[] selectionArgs = {asset, cachePath};
removeCachedFiles(db, selection, selectionArgs);
ContentValues values = new ContentValues();
values.put(AssetCacheEntry.COLUMN_NAME_ASSET, asset);
values.put(AssetCacheEntry.COLUMN_NAME_CACHE_PATH, cachePath);
values.put(AssetCacheEntry.COLUMN_NAME_VERSION, appVersion);
long newRowId = db.insertWithOnConflict(
AssetCacheEntry.TABLE_NAME,
null,
values,
SQLiteDatabase.CONFLICT_REPLACE);
// According to documentation, -1 means any error.
if (newRowId == -1) {
throw new RuntimeException("Can't insert entry into the mediapipe db.");
}
}
@Override
public void onCreate(SQLiteDatabase db) {
db.execSQL(SQL_CREATE_TABLE);
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
// Since version 1 doesn't have the path in the table, just upgrade the table.
db.execSQL(SQL_DELETE_TABLE);
onCreate(db);
}
@Override
public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) {
onUpgrade(db, oldVersion, newVersion);
}
private Cursor queryAssetCacheTable(
SQLiteDatabase db, String[] projection, String selection, String[] selectionArgs) {
return db.query(
AssetCacheEntry.TABLE_NAME, // The table to query
projection, // The columns to return
selection, // The columns for the WHERE clause
selectionArgs, // The values for the WHERE clause
null, // don't group the rows
null, // don't filter by row groups
null // The sort order
);
}
private void removeCachedFiles(SQLiteDatabase db, String selection, String[] selectionArgs) {
String[] projection = {AssetCacheEntry.COLUMN_NAME_CACHE_PATH};
Cursor cursor = queryAssetCacheTable(db, projection, selection, selectionArgs);
if (cursor.moveToFirst()) {
do {
String cachedPath = cursor.getString(
cursor.getColumnIndexOrThrow(AssetCacheEntry.COLUMN_NAME_CACHE_PATH));
File file = new File(cachedPath);
if (file.exists()) {
if (!file.delete()) {
logger.atWarning().log("Stale cached file: %s can't be deleted.", cachedPath);
}
}
} while (cursor.moveToNext());
}
cursor.close();
}
}

View File

@ -0,0 +1,84 @@
# Copyright 2019 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
licenses(["notice"]) # Apache 2.0
# MediaPipe Android framework.
exports_files(["proguard.pgcfg"])
android_library(
name = "android_framework",
proguard_specs = [
":proguard.pgcfg",
],
visibility = ["//visibility:public"],
exports = [
":android_core",
":android_framework_no_mff",
],
)
# TODO: Rename android_framework_no_mff.
android_library(
name = "android_framework_no_mff",
srcs = glob(
["Android*.java"],
) + [
"AssetCache.java",
"AssetCacheDbHelper.java",
"MediaPipeRunner.java",
],
proguard_specs = [
":proguard.pgcfg",
],
exports = [
":android_core",
],
deps = [
":android_core",
"//third_party:androidx_annotation",
"//third_party:androidx_legacy_support_v4",
"@com_google_code_findbugs//jar",
"@com_google_common_flogger//jar",
"@com_google_common_flogger_system_backend//jar",
"@com_google_guava_android//jar",
],
)
# This is the Android version of "framework".
# TODO: unify once allowed by bazel.
# Note: this is not called "android_framework" for historical reasons (that target
# also includes other libraries).
android_library(
name = "android_core",
srcs = glob(
["**/*.java"],
exclude = [
"Android*",
"AssetCache.java",
"AssetCacheDbHelper.java",
"MediaPipeRunner.java",
],
),
deps = [
"//mediapipe/framework:calculator_java_proto_lite",
"//mediapipe/framework:calculator_profile_java_proto_lite",
"//mediapipe/framework/tool:calculator_graph_template_java_proto_lite",
"@com_google_code_findbugs//jar",
"@com_google_common_flogger//jar",
"@com_google_common_flogger_system_backend//jar",
"@com_google_guava_android//jar",
],
)

View File

@ -0,0 +1,34 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/**
* Utilities for compatibility with old versions of Android.
*/
public class Compat {
/**
* Returns the native handle to the current EGL context. Can be used as a
* replacement for EGL14.eglGetCurrentContext().getNativeHandle() before
* API 17.
*/
public static native long getCurrentNativeEGLContext();
/**
* Returns the native handle to the current EGL surface. Can be used as a
* replacement for EGL14.eglGetCurrentSurface().getNativeHandle() before
* API 17.
*/
public static native long getCurrentNativeEGLSurface(int readdraw);
}
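A hedged sketch of the pre-API-17 usage these natives enable; it must run on a thread with a current EGL context:

// Sketch only: capture native EGL handles for hand-off to native code.
// Assumes a current EGL context on this thread; EGL10.EGL_DRAW is the
// draw-surface selector from javax.microedition.khronos.egl.EGL10.
long nativeContext = Compat.getCurrentNativeEGLContext();
long nativeDrawSurface = Compat.getCurrentNativeEGLSurface(EGL10.EGL_DRAW);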

View File

@ -0,0 +1,2 @@
<!-- Intended to be empty to get around the issue of not able to build a
android_native_library without a AndroidManifest.xml file. -->

View File

@ -0,0 +1,37 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/**
* Represents a synchronization point for OpenGL operations. This can be needed when working with
* multiple GL contexts.
*/
public interface GlSyncToken {
/**
* Waits until the GPU has executed all commands up to the sync point. This blocks the CPU, and
* ensures the commands are complete from the point of view of all threads and contexts.
*/
void waitOnCpu();
/**
* Ensures that the following commands on the current OpenGL context will not be executed until
* the sync point has been reached. This does not block the CPU, and only affects the current
* OpenGL context.
*/
void waitOnGpu();
/** Releases the underlying native object. */
void release();
}
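A hedged sketch of the consumer-side pattern this interface supports, paired with the token-based release path on AppTextureFrame; the surrounding GL thread and texture reuse are assumptions:

// Sketch only: defer texture reuse until the GPU is done with it.
// Assumes this runs on a thread with a current GL context and that
// "token" came from a TextureFrame#release(GlSyncToken) callback.
void reuseTextureWhenSafe(GlSyncToken token) {
  token.waitOnGpu();  // queue the wait on this GL context; does not block the CPU
  // ... issue GL commands that overwrite/reuse the texture here ...
  token.release();    // always free the underlying native object
}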

View File

@ -0,0 +1,658 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import com.google.common.base.Preconditions;
import com.google.common.flogger.FluentLogger;
import com.google.mediapipe.proto.CalculatorProto.CalculatorGraphConfig;
import com.google.protobuf.InvalidProtocolBufferException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
//import com.google.mediapipe.proto.GraphTemplateProto.CalculatorGraphTemplate;
//import com.google.protobuf.InvalidProtocolBufferException;
/**
* MediaPipe-related context.
*
* <p>Main purpose is to faciliate the memory management for native allocated mediapipe objects.
*/
public class Graph {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private static final int MAX_BUFFER_SIZE = 20;
private long nativeGraphHandle;
// Hold the references to callbacks.
private final List<PacketCallback> packetCallbacks = new ArrayList<>();
private final List<PacketWithHeaderCallback> packetWithHeaderCallbacks = new ArrayList<>();
// Side packets used for running the graph.
private Map<String, Packet> sidePackets = new HashMap<>();
// Stream headers used for running the graph.
private Map<String, Packet> streamHeaders = new HashMap<>();
// The mode of running used by this context.
// Based on the value of this mode, the caller can use {@link waitUntilIdle} to synchronize with
// the mediapipe native graph runner.
private boolean stepMode = false;
private boolean startRunningGraphCalled = false;
private boolean graphRunning = false;
/** Helper class for a buffered Packet and its timestamp. */
private static class PacketBufferItem {
private PacketBufferItem(Packet packet, Long timestamp) {
this.packet = packet;
this.timestamp = timestamp;
}
final Packet packet;
final Long timestamp;
}
private Map<String, ArrayList<PacketBufferItem>> packetBuffers = new HashMap<>();
// This is used for methods that need to ensure the native context is alive
// while still allowing other methods of this class to execute concurrently.
// Note: if a method needs to acquire both this lock and the Graph intrinsic monitor,
// it must acquire the intrinsic monitor first.
private final Object terminationLock = new Object();
public Graph() {
nativeGraphHandle = nativeCreateGraph();
}
public synchronized long getNativeHandle() {
return nativeGraphHandle;
}
public synchronized void setStepMode(boolean stepMode) {
this.stepMode = stepMode;
}
public synchronized boolean getStepMode() {
return stepMode;
}
/**
* Loads a binary mediapipe graph using an absolute file path.
*
* @param path An absolute file path to a mediapipe graph. An absolute file path can be obtained
 * from an asset file using {@link AssetCache}.
*/
public synchronized void loadBinaryGraph(String path) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
nativeLoadBinaryGraph(nativeGraphHandle, path);
}
/** Loads a binary mediapipe graph from a byte array. */
public synchronized void loadBinaryGraph(byte[] data) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
nativeLoadBinaryGraphBytes(nativeGraphHandle, data);
}
/** Specifies a CalculatorGraphConfig for a mediapipe graph or subgraph. */
public synchronized void loadBinaryGraph(CalculatorGraphConfig config) {
loadBinaryGraph(config.toByteArray());
}
/* Specifies a CalculatorGraphTemplate for a mediapipe graph or subgraph. Disabled until
CalculatorGraphTemplate support is available (see the commented-out imports above).
public synchronized void loadBinaryGraphTemplate(CalculatorGraphTemplate template) {
  nativeLoadBinaryGraphTemplate(nativeGraphHandle, template.toByteArray());
}
*/
/** Specifies the CalculatorGraphConfig::type of the top level graph. */
public synchronized void setGraphType(String graphType) {
nativeSetGraphType(nativeGraphHandle, graphType);
}
/** Specifies options such as template arguments for the graph. */
public synchronized void setGraphOptions(CalculatorGraphConfig.Node options) {
nativeSetGraphOptions(nativeGraphHandle, options.toByteArray());
}
/**
* Returns the canonicalized CalculatorGraphConfig with subgraphs and graph templates expanded.
*/
public synchronized CalculatorGraphConfig getCalculatorGraphConfig() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
byte[] data = nativeGetCalculatorGraphConfig(nativeGraphHandle);
if (data != null) {
try {
return CalculatorGraphConfig.parseFrom(data);
} catch (InvalidProtocolBufferException e) {
throw new RuntimeException(e);
}
}
return null;
}
/**
* Adds a {@link PacketCallback} to the context for callback during graph running.
*
* @param streamName The output stream name in the graph for callback.
* @param callback The callback for handling the call when output stream gets a {@link Packet}.
* @throws MediaPipeException for any error status.
*/
public synchronized void addPacketCallback(String streamName, PacketCallback callback) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
Preconditions.checkNotNull(streamName);
Preconditions.checkNotNull(callback);
Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
packetCallbacks.add(callback);
nativeAddPacketCallback(nativeGraphHandle, streamName, callback);
}
/**
* Adds a {@link PacketWithHeaderCallback} to the context for callback during graph running.
*
* @param streamName The output stream name in the graph for callback.
* @param callback The callback for handling the call when output stream gets a {@link Packet} and
* has a stream header.
* @throws MediaPipeException for any error status.
*/
public synchronized void addPacketWithHeaderCallback(
String streamName, PacketWithHeaderCallback callback) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
Preconditions.checkNotNull(streamName);
Preconditions.checkNotNull(callback);
Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
packetWithHeaderCallbacks.add(callback);
nativeAddPacketWithHeaderCallback(nativeGraphHandle, streamName, callback);
}
/**
* Adds a {@link SurfaceOutput} for a stream producing GpuBuffers.
*
* <p>Multiple outputs can be attached to the same stream.
*
* @param streamName The output stream name in the graph.
 * @return a new SurfaceOutput.
*/
public synchronized SurfaceOutput addSurfaceOutput(String streamName) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
Preconditions.checkNotNull(streamName);
Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
// TODO: check if graph is loaded.
return new SurfaceOutput(
this, Packet.create(nativeAddSurfaceOutput(nativeGraphHandle, streamName)));
}
/**
* Sets the input side packets needed for running the graph.
*
* @param sidePackets MediaPipe input side packet name to {@link Packet} map.
*/
public synchronized void setInputSidePackets(Map<String, Packet> sidePackets) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
for (Map.Entry<String, Packet> entry : sidePackets.entrySet()) {
this.sidePackets.put(entry.getKey(), entry.getValue().copy());
}
}
public synchronized <T> void setServiceObject(GraphService<T> service, T object) {
service.installServiceObject(nativeGraphHandle, object);
}
/**
 * This tells the {@link Graph} that the given stream headers are expected to be available before
 * the graph is run. This function is usually called before streaming starts.
 *
 * <p>Note: Because some MediaPipe calculators need statically available header info before the
 * graph is running, this is needed to synchronize the start of the graph with the availability
 * of the header streams.
*/
public synchronized void addStreamNameExpectingHeader(String streamName) {
Preconditions.checkState(!graphRunning && !startRunningGraphCalled);
streamHeaders.put(streamName, null);
}
/**
 * Sets the stream header for a specific stream if the header is not already set.
 *
 * <p>If the graph is already waiting to be started, it starts once all stream headers are set.
 *
 * <p>Note: If the stream header has already been set, this call will not override the previous
 * value. To override it, call the overload below instead.
*/
public synchronized void setStreamHeader(String streamName, Packet streamHeader) {
setStreamHeader(streamName, streamHeader, false);
}
/**
 * Sets the stream header for a specific stream.
 *
 * <p>If the graph is already waiting to be started, it starts once all stream headers are set.
 *
 * @param override if true, overrides the previously set header; however, if the graph is
 *     running, an {@link IllegalArgumentException} is thrown.
*/
public synchronized void setStreamHeader(
String streamName, Packet streamHeader, boolean override) {
Packet header = streamHeaders.get(streamName);
if (header != null) {
if (override) {
if (graphRunning) {
throw new IllegalArgumentException(
"Can't override an existing stream header, after graph started running.");
}
header.release();
} else {
// Don't override, so just return since header is set already.
return;
}
}
streamHeaders.put(streamName, streamHeader.copy());
if (!graphRunning && startRunningGraphCalled && hasAllStreamHeaders()) {
startRunningGraph();
}
}
/**
* Runs the mediapipe graph until it finishes.
*
* <p>Side packets that are needed by the graph should be set using {@link setInputSidePackets}.
* @throws MediaPipeException for any error status.
*/
public synchronized void runGraphUntilClose() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
Preconditions.checkNotNull(sidePackets);
String[] streamNames = new String[sidePackets.size()];
long[] packets = new long[sidePackets.size()];
splitStreamNamePacketMap(sidePackets, streamNames, packets);
nativeRunGraphUntilClose(nativeGraphHandle, streamNames, packets);
}
/**
* Starts running the MediaPipe graph.
*
* <p>Returns immediately after starting the scheduler.
*
* <p>Side packets that are needed by the graph should be set using {@link setInputSidePackets}.
* @throws MediaPipeException for any error status.
*/
public synchronized void startRunningGraph() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
startRunningGraphCalled = true;
if (!hasAllStreamHeaders()) {
// The graph will be run later, once all stream headers are assembled.
logger.atInfo().log("MediaPipe graph won't start until all stream headers are available.");
return;
}
// Prepare the side packets.
String[] sidePacketNames = new String[sidePackets.size()];
long[] sidePacketHandles = new long[sidePackets.size()];
splitStreamNamePacketMap(sidePackets, sidePacketNames, sidePacketHandles);
// Prepare the Stream headers.
String[] streamNamesWithHeader = new String[streamHeaders.size()];
long[] streamHeaderHandles = new long[streamHeaders.size()];
splitStreamNamePacketMap(streamHeaders, streamNamesWithHeader, streamHeaderHandles);
nativeStartRunningGraph(
nativeGraphHandle,
sidePacketNames,
sidePacketHandles,
streamNamesWithHeader,
streamHeaderHandles);
// Packets can be buffered before the actual mediapipe graph starts. Send them in now, if we
// started successfully.
graphRunning = true;
moveBufferedPacketsToInputStream();
}
/**
* Sets blocking behavior when adding packets to a graph input stream via {@link
* addPacketToInputStream}. If set to true, the method will block until all dependent input
* streams fall below the maximum queue size set in the graph config. If false, it will return and
* not add a packet if any dependent input stream is full. To add a packet unconditionally, set
* the maximum queue size to -1 in the graph config.
*/
public synchronized void setGraphInputStreamBlockingMode(boolean mode) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
Preconditions.checkState(!graphRunning);
nativeSetGraphInputStreamBlockingMode(nativeGraphHandle, mode);
}
/**
* Adds one packet into a graph input stream based on the graph stream input mode.
*
* @param streamName the name of the input stream.
* @param packet the mediapipe packet.
 * @param timestamp the timestamp of the packet; although not enforced, the unit is normally
 *     microseconds.
* @throws MediaPipeException for any error status.
*/
public synchronized void addPacketToInputStream(
String streamName, Packet packet, long timestamp) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
if (!graphRunning) {
addPacketToBuffer(streamName, packet.copy(), timestamp);
} else {
nativeAddPacketToInputStream(
nativeGraphHandle, streamName, packet.getNativeHandle(), timestamp);
}
}
/**
* Adds one packet into a graph input stream based on the graph stream input mode. Also
 * simultaneously yields ownership of the packet to the graph, so additional memory optimizations
* are possible. When the function ends normally, the packet will be consumed and should no longer
* be referenced. When the function ends with MediaPipeException, the packet will remain
* unaffected, so this call may be retried later.
*
* @param streamName the name of the input stream.
* @param packet the mediapipe packet.
 * @param timestamp the timestamp of the packet; although not enforced, the unit is normally
 *     microseconds.
* @throws MediaPipeException for any error status.
*/
public synchronized void addConsumablePacketToInputStream(
String streamName, Packet packet, long timestamp) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
if (!graphRunning) {
addPacketToBuffer(streamName, packet.copy(), timestamp);
// Release current packet to honor move semantics.
packet.release();
} else {
// We move the packet here into native, allowing it to take full control.
nativeMovePacketToInputStream(
nativeGraphHandle, streamName, packet.getNativeHandle(), timestamp);
// The Java handle is released now if the packet was successfully moved. Otherwise the Java
// handle continues to own the packet contents.
packet.release();
}
}
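// Illustrative retry sketch, not part of this file; "input_video" is a
// hypothetical stream name. On MediaPipeException the packet is still owned
// by the caller, so the call can be retried or a copying fallback used:
//
//   try {
//     graph.addConsumablePacketToInputStream("input_video", packet, timestampUs);
//     // Success: the packet was consumed and must no longer be referenced.
//   } catch (MediaPipeException e) {
//     // Failure: the caller still owns the packet; retry later or fall back:
//     graph.addPacketToInputStream("input_video", packet, timestampUs);
//     packet.release();
//   }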
/**
* Closes the specified input stream.
* @throws MediaPipeException for any error status.
*/
public synchronized void closeInputStream(String streamName) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
nativeCloseInputStream(nativeGraphHandle, streamName);
}
/**
* Closes all the input streams in the mediapipe graph.
* @throws MediaPipeException for any error status.
*/
public synchronized void closeAllInputStreams() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
nativeCloseAllInputStreams(nativeGraphHandle);
}
/**
* Closes all the input streams and source calculators in the mediapipe graph.
* @throws MediaPipeException for any error status.
*/
public synchronized void closeAllPacketSources() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
nativeCloseAllPacketSources(nativeGraphHandle);
}
/**
* Waits until the graph is done processing.
*
* <p>This should be called after all sources and input streams are closed.
* @throws MediaPipeException for any error status.
*/
public synchronized void waitUntilGraphDone() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
nativeWaitUntilGraphDone(nativeGraphHandle);
}
/**
* Waits until the graph runner is idle.
* @throws MediaPipeException for any error status.
*/
public synchronized void waitUntilGraphIdle() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
nativeWaitUntilGraphIdle(nativeGraphHandle);
}
/** Releases the native mediapipe context. */
public synchronized void tearDown() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
for (Map.Entry<String, Packet> entry : sidePackets.entrySet()) {
entry.getValue().release();
}
sidePackets.clear();
for (Map.Entry<String, Packet> entry : streamHeaders.entrySet()) {
if (entry.getValue() != null) {
entry.getValue().release();
}
}
streamHeaders.clear();
for (Map.Entry<String, ArrayList<PacketBufferItem>> entry : packetBuffers.entrySet()) {
for (PacketBufferItem item : entry.getValue()) {
item.packet.release();
}
}
packetBuffers.clear();
synchronized (terminationLock) {
if (nativeGraphHandle != 0) {
nativeReleaseGraph(nativeGraphHandle);
nativeGraphHandle = 0;
}
}
packetCallbacks.clear();
packetWithHeaderCallbacks.clear();
}
/**
* Updates the value of a MediaPipe packet that holds a reference to another MediaPipe packet.
*
* <p>This updates a mutable packet. Useful for a calculator that needs an external way of
* updating its parameters using input side packets.
*
* <p>After calling this, {@code newPacket} can be released (by calling newPacket.release()) if
* it is no longer needed in Java. The {@code referencePacket} already holds the reference.
*
* @param referencePacket a mediapipe packet that has the value type Packet*.
* @param newPacket the new value for the reference packet to hold.
*/
public synchronized void updatePacketReference(Packet referencePacket, Packet newPacket) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
nativeUpdatePacketReference(
referencePacket.getNativeHandle(), newPacket.getNativeHandle());
}
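// Illustrative sketch, not part of this file: updating a reference packet
// created earlier with PacketCreator#createReferencePacket. The value and
// variable names below are hypothetical:
//
//   Packet newValue = creator.createFloat32(0.5f);
//   graph.updatePacketReference(referencePacket, newValue);
//   newValue.release();  // referencePacket already holds the reference.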
/**
* Creates a shared GL runner with the specified name so that MediaPipe calculators can use
* OpenGL. This runner should be connected to the calculators by specifying an input side packet
* in the graph file with the same name.
*
* @throws MediaPipeException for any error status.
* @deprecated Call {@link setParentGlContext} to set up texture sharing between contexts. Apart
* from that, GL is set up automatically.
*/
@Deprecated
public synchronized void createGlRunner(String name, long javaGlContext) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
Preconditions.checkArgument(name.equals("gpu_shared"));
setParentGlContext(javaGlContext);
}
/**
* Specifies an external GL context to use as the parent of MediaPipe's GL context. This will
* enable the sharing of textures and other objects between the two contexts.
*
* <p>Cannot be called after the graph has been started.
* @throws MediaPipeException for any error status.
*/
public synchronized void setParentGlContext(long javaGlContext) {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
Preconditions.checkState(!graphRunning);
nativeSetParentGlContext(nativeGraphHandle, javaGlContext);
}
/**
* Cancels the running graph.
*/
public synchronized void cancelGraph() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
nativeCancelGraph(nativeGraphHandle);
}
/** Returns {@link GraphProfiler}. */
public GraphProfiler getProfiler() {
Preconditions.checkState(
nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already.");
return new GraphProfiler(nativeGetProfiler(nativeGraphHandle), this);
}
private boolean addPacketToBuffer(String streamName, Packet packet, long timestamp) {
if (!packetBuffers.containsKey(streamName)) {
packetBuffers.put(streamName, new ArrayList<PacketBufferItem>());
}
List<PacketBufferItem> buffer = packetBuffers.get(streamName);
if (buffer.size() > MAX_BUFFER_SIZE) {
for (Map.Entry<String, Packet> entry : streamHeaders.entrySet()) {
if (entry.getValue() == null) {
logger.atSevere().log("Stream: %s might be missing.", entry.getKey());
}
}
throw new RuntimeException("Graph is not started because of missing streams");
}
buffer.add(new PacketBufferItem(packet, timestamp));
return true;
}
// Any previously-buffered packets should be passed along to our graph. They've already been
// copied into our buffers, so it's fine to move them all over to native.
private void moveBufferedPacketsToInputStream() {
if (!packetBuffers.isEmpty()) {
for (Map.Entry<String, ArrayList<PacketBufferItem>> entry : packetBuffers.entrySet()) {
for (PacketBufferItem item : entry.getValue()) {
try {
nativeMovePacketToInputStream(
nativeGraphHandle, entry.getKey(), item.packet.getNativeHandle(), item.timestamp);
} catch (MediaPipeException e) {
logger.atSevere().log(
"AddPacket for stream: %s failed: %s.", entry.getKey(), e.getMessage());
throw e;
}
// Need to release successfully moved packets
item.packet.release();
}
}
packetBuffers.clear();
}
}
private static void splitStreamNamePacketMap(
Map<String, Packet> namePacketMap, String[] streamNames, long[] packets) {
if (namePacketMap.size() != streamNames.length || namePacketMap.size() != packets.length) {
throw new RuntimeException("Input array length doesn't match the map size!");
}
int i = 0;
for (Map.Entry<String, Packet> entry : namePacketMap.entrySet()) {
streamNames[i] = entry.getKey();
packets[i] = entry.getValue().getNativeHandle();
++i;
}
}
private boolean hasAllStreamHeaders() {
for (Map.Entry<String, Packet> entry : streamHeaders.entrySet()) {
if (entry.getValue() == null) {
return false;
}
}
return true;
}
private native long nativeCreateGraph();
private native void nativeReleaseGraph(long context);
private native void nativeAddPacketCallback(
long context, String streamName, PacketCallback callback);
private native void nativeAddPacketWithHeaderCallback(
long context, String streamName, PacketWithHeaderCallback callback);
private native long nativeAddSurfaceOutput(long context, String streamName);
private native void nativeLoadBinaryGraph(long context, String path);
private native void nativeLoadBinaryGraphBytes(long context, byte[] data);
private native void nativeLoadBinaryGraphTemplate(long context, byte[] data);
private native void nativeSetGraphType(long context, String graphType);
private native void nativeSetGraphOptions(long context, byte[] data);
private native byte[] nativeGetCalculatorGraphConfig(long context);
private native void nativeRunGraphUntilClose(long context, String[] streamNames, long[] packets);
private native void nativeStartRunningGraph(
long context,
String[] sidePacketNames,
long[] sidePacketHandles,
String[] streamNamesWithHeader,
long[] streamHeaderHandles);
private native void nativeAddPacketToInputStream(
long context, String streamName, long packet, long timestamp);
private native void nativeMovePacketToInputStream(
long context, String streamName, long packet, long timestamp);
private native void nativeSetGraphInputStreamBlockingMode(long context, boolean mode);
private native void nativeCloseInputStream(long context, String streamName);
private native void nativeCloseAllInputStreams(long context);
private native void nativeCloseAllPacketSources(long context);
private native void nativeWaitUntilGraphDone(long context);
private native void nativeWaitUntilGraphIdle(long context);
private native void nativeUpdatePacketReference(long referencePacket, long newPacket);
private native void nativeSetParentGlContext(long context, long javaGlContext);
private native void nativeCancelGraph(long context);
private native long nativeGetProfiler(long context);
}

View File

@ -0,0 +1,56 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/**
* Represents a synchronization point for OpenGL operations. It can be used to wait until the GPU
* has reached the specified point in the sequence of commands it is executing. This can be
* necessary when working with multiple GL contexts.
*/
final class GraphGlSyncToken implements GlSyncToken {
private long token;
@Override
public void waitOnCpu() {
if (token != 0) {
nativeWaitOnCpu(token);
}
}
@Override
public void waitOnGpu() {
if (token != 0) {
nativeWaitOnGpu(token);
}
}
@Override
public void release() {
if (token != 0) {
nativeRelease(token);
token = 0;
}
}
GraphGlSyncToken(long token) {
this.token = token;
}
private static native void nativeWaitOnCpu(long token);
private static native void nativeWaitOnGpu(long token);
private static native void nativeRelease(long token);
}

View File

@ -0,0 +1,97 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import com.google.common.base.Preconditions;
import com.google.mediapipe.proto.CalculatorProfileProto.CalculatorProfile;
import com.google.protobuf.InvalidProtocolBufferException;
import java.util.ArrayList;
import java.util.List;
/** MediaPipe Profiler Java API. */
public class GraphProfiler {
private final long nativeProfilerHandle;
private final Graph mediapipeGraph;
GraphProfiler(long nativeProfilerHandle, Graph mediapipeGraph) {
Preconditions.checkState(
nativeProfilerHandle != 0,
"Invalid profiler, tearDown() might have been called already.");
this.nativeProfilerHandle = nativeProfilerHandle;
this.mediapipeGraph = mediapipeGraph;
}
/**
* Resets all the calculator profilers in the graph. This only resets the information about
* Process() and does NOT affect information for Open() and Close() methods.
*/
public void reset() {
synchronized (mediapipeGraph) {
checkContext();
nativeReset(nativeProfilerHandle);
}
}
/** Resumes all the calculator profilers in the graph. No-op if already profiling. */
public void resume() {
synchronized (mediapipeGraph) {
checkContext();
nativeResume(nativeProfilerHandle);
}
}
/** Pauses all the calculator profilers in the graph. No-op if already paused. */
public void pause() {
synchronized (mediapipeGraph) {
checkContext();
nativePause(nativeProfilerHandle);
}
}
/**
* Collects the runtime profile for Open(), Process(), and Close() of each calculator in the
* graph. May be called at any time after the graph has been initialized.
*/
public List<CalculatorProfile> getCalculatorProfiles() {
synchronized (mediapipeGraph) {
checkContext();
byte[][] profileBytes = nativeGetCalculatorProfiles(nativeProfilerHandle);
List<CalculatorProfile> profileList = new ArrayList<>();
for (byte[] element : profileBytes) {
try {
CalculatorProfile profile = CalculatorProfile.parseFrom(element);
profileList.add(profile);
} catch (InvalidProtocolBufferException e) {
throw new RuntimeException(e);
}
}
return profileList;
}
}
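// Illustrative usage sketch, not part of this file; assumes profiling has
// been enabled in the graph config, and that CalculatorProfile exposes the
// usual proto accessors such as getName():
//
//   GraphProfiler profiler = graph.getProfiler();
//   profiler.reset();
//   profiler.resume();
//   // ... run the graph ...
//   profiler.pause();
//   for (CalculatorProfile profile : profiler.getCalculatorProfiles()) {
//     System.out.println("calculator: " + profile.getName());
//   }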
private void checkContext() {
Preconditions.checkState(
mediapipeGraph.getNativeHandle() != 0,
"Invalid context, tearDown() might have been called already.");
}
private native void nativeReset(long profilingContextHandle);
private native void nativeResume(long profilingContextHandle);
private native void nativePause(long profilingContextHandle);
private native byte[][] nativeGetCalculatorProfiles(long profilingContextHandle);
}

View File

@ -0,0 +1,30 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/**
* Implement this interface to wrap a native GraphService.
*
* <p>T should be the Java class wrapping the native service object.
*/
public interface GraphService<T> {
/**
* Provides the native service object corresponding to the provided Java object. This must be
* handled by calling mediapipe::android::GraphServiceHelper::SetServiceObject in native code,
* passing the provided context argument. We do it this way to minimize the number of trips
* through JNI and maintain more type safety in the native code.
*/
public void installServiceObject(long context, T object);
}

View File

@ -0,0 +1,95 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/**
* A {@link TextureFrame} that represents a texture produced by MediaPipe.
*
* <p>The consumer is typically your application, which should therefore call the {@link #release()}
* method.
*/
public class GraphTextureFrame implements TextureFrame {
private long nativeBufferHandle;
// We cache these to be able to get them without a JNI call.
private int textureName;
private int width;
private int height;
private long timestamp = Long.MIN_VALUE;
GraphTextureFrame(long nativeHandle, long timestamp) {
nativeBufferHandle = nativeHandle;
// TODO: use a single JNI call to fill in all info
textureName = nativeGetTextureName(nativeBufferHandle);
width = nativeGetWidth(nativeBufferHandle);
height = nativeGetHeight(nativeBufferHandle);
this.timestamp = timestamp;
}
/** Returns the name of the underlying OpenGL texture. */
@Override
public int getTextureName() {
return textureName;
}
/** Returns the width of the underlying OpenGL texture. */
@Override
public int getWidth() {
return width;
}
/** Returns the height of the underlying OpenGL texture. */
@Override
public int getHeight() {
return height;
}
@Override
public long getTimestamp() {
return timestamp;
}
/**
* Releases a reference to the underlying buffer.
*
* <p>The consumer calls this when it is done using the texture.
*/
@Override
public void release() {
if (nativeBufferHandle != 0) {
nativeReleaseBuffer(nativeBufferHandle);
nativeBufferHandle = 0;
}
}
/**
* Releases a reference to the underlying buffer.
*
* <p>This form of the method is called when the consumer is MediaPipe itself. This can occur if a
* packet coming out of the graph is sent back into an input stream. Since both the producer and
* the consumer use the same context, we do not need to do further synchronization. Note: we do
* not currently support GPU sync across multiple graphs. TODO: Application consumers
* currently cannot create a GlSyncToken, so they cannot call this method.
*/
@Override
public void release(GlSyncToken syncToken) {
syncToken.release();
release();
}
private native void nativeReleaseBuffer(long nativeHandle);
private native int nativeGetTextureName(long nativeHandle);
private native int nativeGetWidth(long nativeHandle);
private native int nativeGetHeight(long nativeHandle);
}

View File

@ -0,0 +1,76 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
// Package java.nio.charset is not yet available in all Android apps.
import static com.google.common.base.Charsets.UTF_8;
/** This class represents an error reported by the MediaPipe framework. */
public class MediaPipeException extends RuntimeException {
public MediaPipeException(int statusCode, String statusMessage) {
super(StatusCode.values()[statusCode].description() + ": " + statusMessage);
this.statusCode = StatusCode.values()[statusCode];
this.statusMessage = statusMessage;
}
// Package base.Charsets is deprecated, but package java.nio.charset is not
// yet available in all Android apps.
@SuppressWarnings("deprecation")
MediaPipeException(int code, byte[] message) {
this(code, new String(message, UTF_8));
}
public StatusCode getStatusCode() {
return statusCode;
}
public String getStatusMessage() {
return statusMessage;
}
/** The 17 canonical status codes. */
public enum StatusCode {
OK("ok"),
CANCELLED("canceled"),
UNKNOWN("unknown"),
INVALID_ARGUMENT("invalid argument"),
DEADLINE_EXCEEDED("deadline exceeded"),
NOT_FOUND("not found"),
ALREADY_EXISTS("already exists"),
PERMISSION_DENIED("permission denied"),
RESOURCE_EXHAUSTED("resource exhausted"),
FAILED_PRECONDITION("failed precondition"),
ABORTED("aborted"),
OUT_OF_RANGE("out of range"),
UNIMPLEMENTED("unimplemented"),
INTERNAL("internal"),
UNAVAILABLE("unavailable"),
DATA_LOSS("data loss"),
UNAUTHENTICATED("unauthenticated");
StatusCode(String description) {
this.description = description;
}
public String description() {
return description;
}
private final String description;
};
private final StatusCode statusCode;
private final String statusMessage;
}
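// Illustrative handling sketch, not part of this file: distinguishing a
// cancelled graph from a real failure.
//
//   try {
//     graph.waitUntilGraphDone();
//   } catch (MediaPipeException e) {
//     if (e.getStatusCode() == MediaPipeException.StatusCode.CANCELLED) {
//       // The graph was cancelled via cancelGraph(); treat as a clean shutdown.
//     } else {
//       throw e;
//     }
//   }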

View File

@ -0,0 +1,53 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import android.content.Context;
/** {@link MediaPipeRunner} is an abstract class for running MediaPipe graph in Android. */
public abstract class MediaPipeRunner extends Graph {
protected Context context;
public MediaPipeRunner(Context context) {
// Creates a singleton AssetCache.
AssetCache.create(context);
this.context = context;
}
public void loadBinaryGraphFromAsset(String assetPath) {
try {
this.loadBinaryGraph(AssetCache.getAssetCache().getAbsolutePathFromAsset(assetPath));
} catch (MediaPipeException e) {
// TODO: Report this error from MediaPipe.
}
}
/**
* Starts running the graph.
*/
public abstract void start();
/**
* Pauses a running graph.
*/
public abstract void pause();
/**
* Resumes a paused graph.
*/
public abstract void resume();
/**
* Stops the running graph and releases its resources. Call this in the Activity onDestroy
* callback.
*/
public abstract void release();
}

View File

@ -0,0 +1,85 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/**
* Java wrapper class for a native MediaPipe Packet.
*
* <p>To interpret the content of the packet, use {@link PacketGetter}. To create content of a
* packet, use {@link PacketCreator}. Java Packet should be released manually when no longer needed.
*
* <p>{@link Packet} can also be managed by {@link Graph}, which automatically releases all the
* packets in the context. Even so, memory should be managed carefully, and packets should be
* released as soon as they are no longer needed.
*/
public class Packet {
// Points to a native Packet.
private long nativePacketHandle;
/**
* Creates a Java packet from a native mediapipe packet handle.
*
* @return A Packet from a native internal::PacketWithContext handle.
*/
public static Packet create(long nativeHandle) {
return new Packet(nativeHandle);
}
/**
* @return The native handle of the packet.
*/
public long getNativeHandle() {
return nativePacketHandle;
}
/** @return The timestamp of the Packet. */
public long getTimestamp() {
return nativeGetTimestamp(nativePacketHandle);
}
/**
* @return a shared copy of the Packet.
*     <p>This essentially increases the reference count of the data encapsulated in the native
*     mediapipe packet.
*/
public Packet copy() {
return new Packet(nativeCopyPacket(nativePacketHandle));
}
/**
* Releases the native allocation of the packet.
*
* <p>After the Graph for this packet is torn down, calling this causes unexpected behavior,
* since Graph tearDown releases the native memory of all the Packets it holds.
*/
public void release() {
if (nativePacketHandle != 0) {
nativeReleasePacket(nativePacketHandle);
nativePacketHandle = 0;
}
}
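// Illustrative lifecycle sketch, not part of this file:
//
//   Packet copy = packet.copy();  // bumps the native reference count
//   long timestamp = copy.getTimestamp();
//   copy.release();               // release each Java handle when done
//   packet.release();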
// Packet is not intended to be constructed directly.
private Packet(long handle) {
nativePacketHandle = handle;
}
// Releases the native memory.
private native void nativeReleasePacket(long packetHandle);
private native long nativeCopyPacket(long packetHandle);
private native long nativeGetTimestamp(long packetHandle);
}

View File

@ -0,0 +1,20 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/** Interface for MediaPipe callback with packet. */
public interface PacketCallback {
public void process(Packet packet);
}

View File

@ -0,0 +1,308 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import com.google.protobuf.MessageLite;
import java.nio.ByteBuffer;
// TODO: use Preconditions in this file.
/**
* Creates {@link Packet} in the given {@link Graph}.
*
* <p>This class provides a set of functions to create basic mediapipe packet types.
*/
public class PacketCreator {
protected Graph mediapipeGraph;
public PacketCreator(Graph context) {
mediapipeGraph = context;
}
/**
* Creates a MediaPipe Packet that contains a pointer to another MediaPipe packet.
*
* <p>This can be used as a way to update the value of a packet. Similar to a mutable packet using
* mediapipe::AdoptAsUniquePtr.
*
* <p>The parameter {@code packet} can be released after this call, since the new packet already
* holds a reference to it in the native object.
*/
public Packet createReferencePacket(Packet packet) {
return Packet.create(
nativeCreateReferencePacket(mediapipeGraph.getNativeHandle(), packet.getNativeHandle()));
}
/**
* Creates a 3 channel RGB ImageFrame packet from an RGB buffer.
*
* <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer. The pixel rows should have
* 4-byte alignment.
*/
public Packet createRgbImage(ByteBuffer buffer, int width, int height) {
int widthStep = (((width * 3) + 3) / 4) * 4;
if (widthStep * height != buffer.capacity()) {
throw new RuntimeException("The size of the buffer should be: " + widthStep * height);
}
return Packet.create(
nativeCreateRgbImage(mediapipeGraph.getNativeHandle(), buffer, width, height));
}
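// Illustrative sketch, not part of this file: allocating a direct buffer
// whose rows satisfy the 4-byte alignment checked above. The width and
// height values are hypothetical:
//
//   int widthStep = (((width * 3) + 3) / 4) * 4;
//   ByteBuffer buffer = ByteBuffer.allocateDirect(widthStep * height);
//   // ... fill buffer row by row, padding each row to widthStep bytes ...
//   Packet packet = creator.createRgbImage(buffer, width, height);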
/**
* Creates a MediaPipe audio packet that is used by most of the audio calculators.
*
* @param data the raw audio data; bytes per sample is 2.
* @param numChannels number of channels in the raw data.
* @param numSamples number of samples in the data.
*/
public Packet createAudioPacket(byte[] data, int numChannels, int numSamples) {
if (numChannels * numSamples * 2 != data.length) {
throw new RuntimeException("Data doesn't have the correct size.");
}
return Packet.create(
nativeCreateAudioPacket(mediapipeGraph.getNativeHandle(), data, numChannels, numSamples));
}
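// Illustrative sketch, not part of this file: with 16-bit samples there are
// two bytes per sample per channel, which is exactly the size checked above:
//
//   byte[] data = new byte[numChannels * numSamples * 2];
//   // ... fill with 16-bit PCM samples ...
//   Packet audio = creator.createAudioPacket(data, numChannels, numSamples);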
/**
* Creates a 3 channel RGB ImageFrame packet from an RGBA buffer.
*
* <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
*/
public Packet createRgbImageFromRgba(ByteBuffer buffer, int width, int height) {
if (width * height * 4 != buffer.capacity()) {
throw new RuntimeException("The size of the buffer should be: " + width * height * 4);
}
return Packet.create(
nativeCreateRgbImageFromRgba(mediapipeGraph.getNativeHandle(), buffer, width, height));
}
/**
* Creates a 1 channel ImageFrame packet from a U8 buffer.
*
* <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
*/
public Packet createGrayscaleImage(ByteBuffer buffer, int width, int height) {
if (width * height != buffer.capacity()) {
throw new RuntimeException(
"The size of the buffer should be: " + width * height + " but is " + buffer.capacity());
}
return Packet.create(
nativeCreateGrayscaleImage(mediapipeGraph.getNativeHandle(), buffer, width, height));
}
/**
* Creates a 4 channel RGBA ImageFrame packet from an RGBA buffer.
*
* <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
*/
public Packet createRgbaImageFrame(ByteBuffer buffer, int width, int height) {
if (buffer.capacity() != width * height * 4) {
throw new RuntimeException("buffer doesn't have the correct size.");
}
return Packet.create(
nativeCreateRgbaImageFrame(mediapipeGraph.getNativeHandle(), buffer, width, height));
}
public Packet createInt16(short value) {
return Packet.create(nativeCreateInt16(mediapipeGraph.getNativeHandle(), value));
}
public Packet createInt32(int value) {
return Packet.create(nativeCreateInt32(mediapipeGraph.getNativeHandle(), value));
}
public Packet createInt64(long value) {
return Packet.create(nativeCreateInt64(mediapipeGraph.getNativeHandle(), value));
}
public Packet createFloat32(float value) {
return Packet.create(nativeCreateFloat32(mediapipeGraph.getNativeHandle(), value));
}
public Packet createFloat64(double value) {
return Packet.create(nativeCreateFloat64(mediapipeGraph.getNativeHandle(), value));
}
public Packet createBool(boolean value) {
return Packet.create(nativeCreateBool(mediapipeGraph.getNativeHandle(), value));
}
public Packet createString(String value) {
return Packet.create(nativeCreateString(mediapipeGraph.getNativeHandle(), value));
}
public Packet createInt16Vector(short[] data) {
throw new UnsupportedOperationException("Not implemented yet");
}
public Packet createInt32Vector(int[] data) {
throw new UnsupportedOperationException("Not implemented yet");
}
public Packet createInt64Vector(long[] data) {
throw new UnsupportedOperationException("Not implemented yet");
}
public Packet createFloat32Vector(float[] data) {
throw new UnsupportedOperationException("Not implemented yet");
}
public Packet createFloat64Vector(double[] data) {
throw new UnsupportedOperationException("Not implemented yet");
}
public Packet createInt32Array(int[] data) {
return Packet.create(nativeCreateInt32Array(mediapipeGraph.getNativeHandle(), data));
}
public Packet createFloat32Array(float[] data) {
return Packet.create(nativeCreateFloat32Array(mediapipeGraph.getNativeHandle(), data));
}
public Packet createByteArray(byte[] data) {
return Packet.create(nativeCreateStringFromByteArray(mediapipeGraph.getNativeHandle(), data));
}
/**
* Creates a VideoHeader to be used by calculators that require it.
*
* <p>Note: frame rate and duration are not populated. A calculator that needs those values
* cannot be used with this header; modify the calculator so it does not require them.
*/
public Packet createVideoHeader(int width, int height) {
return Packet.create(nativeCreateVideoHeader(mediapipeGraph.getNativeHandle(), width, height));
}
/**
* Creates a mediapipe::TimeSeriesHeader, which is used by many audio related calculators.
*
* @param numChannels number of audio channels.
* @param sampleRate sampling rate in Hertz.
*/
public Packet createTimeSeriesHeader(int numChannels, double sampleRate) {
return Packet.create(
nativeCreateTimeSeriesHeader(mediapipeGraph.getNativeHandle(), numChannels, sampleRate));
}
public Packet createMatrix(int rows, int cols, float[] data) {
return Packet.create(nativeCreateMatrix(mediapipeGraph.getNativeHandle(), rows, cols, data));
}
/** Creates a {@link Packet} containing the serialized proto string. */
public Packet createSerializedProto(MessageLite message) {
return Packet.create(
nativeCreateStringFromByteArray(mediapipeGraph.getNativeHandle(), message.toByteArray()));
}
/** Creates a {@link Packet} containing a {@code CalculatorOptions} proto message. */
public Packet createCalculatorOptions(MessageLite message) {
return Packet.create(
nativeCreateCalculatorOptions(mediapipeGraph.getNativeHandle(), message.toByteArray()));
}
/** Creates a {@link Packet} containing the given camera intrinsics. */
public Packet createCameraIntrinsics(
float fx, float fy, float cx, float cy, float width, float height) {
return Packet.create(
nativeCreateCameraIntrinsics(
mediapipeGraph.getNativeHandle(), fx, fy, cx, cy, width, height));
}
/**
* Creates a mediapipe::GpuBuffer with the specified texture name and dimensions.
*
* @param name the OpenGL texture name.
* @param width the width in pixels.
* @param height the height in pixels.
* @param releaseCallback a callback to be invoked when the mediapipe::GpuBuffer is released. Can be
* null.
*/
public Packet createGpuBuffer(
int name, int width, int height, TextureReleaseCallback releaseCallback) {
return Packet.create(
nativeCreateGpuBuffer(
mediapipeGraph.getNativeHandle(), name, width, height, releaseCallback));
}
/**
* Creates a mediapipe::GpuBuffer with the specified texture name and dimensions.
*
* @param name the OpenGL texture name.
* @param width the width in pixels.
* @param height the height in pixels.
* @deprecated use {@link #createGpuBuffer(int,int,int,TextureReleaseCallback)} instead.
*/
@Deprecated
public Packet createGpuBuffer(int name, int width, int height) {
return Packet.create(
nativeCreateGpuBuffer(mediapipeGraph.getNativeHandle(), name, width, height, null));
}
/**
* Creates a mediapipe::GpuBuffer with the provided {@link TextureFrame}.
*
* <p>Note: in order for MediaPipe to be able to access the texture, the application's GL context
* must be linked with MediaPipe's. This is ensured by calling {@link
* Graph#createGlRunner(String,long)} with the native handle to the application's GL context as
* the second argument.
*/
public Packet createGpuBuffer(TextureFrame frame) {
return Packet.create(
nativeCreateGpuBuffer(
mediapipeGraph.getNativeHandle(),
frame.getTextureName(),
frame.getWidth(),
frame.getHeight(),
frame));
}
/** Helper callback adaptor to create the Java {@link GlSyncToken}. This is called by JNI code. */
private void releaseWithSyncToken(long nativeSyncToken, TextureReleaseCallback releaseCallback) {
releaseCallback.release(new GraphGlSyncToken(nativeSyncToken));
}
private native long nativeCreateReferencePacket(long context, long packet);
private native long nativeCreateRgbImage(long context, ByteBuffer buffer, int width, int height);
private native long nativeCreateAudioPacket(
long context, byte[] data, int numChannels, int numSamples);
private native long nativeCreateRgbImageFromRgba(
long context, ByteBuffer buffer, int width, int height);
private native long nativeCreateGrayscaleImage(
long context, ByteBuffer buffer, int width, int height);
private native long nativeCreateRgbaImageFrame(
long context, ByteBuffer buffer, int width, int height);
private native long nativeCreateInt16(long context, short value);
private native long nativeCreateInt32(long context, int value);
private native long nativeCreateInt64(long context, long value);
private native long nativeCreateFloat32(long context, float value);
private native long nativeCreateFloat64(long context, double value);
private native long nativeCreateBool(long context, boolean value);
private native long nativeCreateString(long context, String value);
private native long nativeCreateVideoHeader(long context, int width, int height);
private native long nativeCreateTimeSeriesHeader(
long context, int numChannels, double sampleRate);
private native long nativeCreateMatrix(long context, int rows, int cols, float[] data);
private native long nativeCreateGpuBuffer(
long context, int name, int width, int height, TextureReleaseCallback releaseCallback);
private native long nativeCreateInt32Array(long context, int[] data);
private native long nativeCreateFloat32Array(long context, float[] data);
private native long nativeCreateStringFromByteArray(long context, byte[] data);
private native long nativeCreateCalculatorOptions(long context, byte[] data);
private native long nativeCreateCameraIntrinsics(
long context, float fx, float fy, float cx, float cy, float width, float height);
}

View File

@ -0,0 +1,303 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import com.google.common.flogger.FluentLogger;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
/**
* Converts the {@link Packet} to java accessible data types.
*
* <p>{@link Packet} is a thin java wrapper for the native MediaPipe packet. This class provides the
* extendable conversion needed to access the data in the packet.
*
* <p>Note that it is still the developer's responsibility to interpret the data correctly.
*/
public final class PacketGetter {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
/** Helper class for a pair of Packets. */
public static class PacketPair {
public PacketPair(Packet first, Packet second) {
this.first = first;
this.second = second;
}
final Packet first;
final Packet second;
}
/**
* Returns the {@link Packet} held in the reference packet.
*
* <p>Note: release the returned packet after use.
*/
public static Packet getPacketFromReference(final Packet referencePacket) {
return Packet.create(nativeGetPacketFromReference(referencePacket.getNativeHandle()));
}
/**
* Returns both packets held in a {@link Packet} that contains a pair of packets.
*
* <p>Note: release the packets in the pair after use.
*
* @param packet A MediaPipe packet that contains a pair of packets.
*/
public static PacketPair getPairOfPackets(final Packet packet) {
long[] handles = nativeGetPairPackets(packet.getNativeHandle());
return new PacketPair(Packet.create(handles[0]), Packet.create(handles[1]));
}
/**
* Returns a list of the packets contained in the {@link Packet}.
*
* <p>Note: release the packets in the list after use.
*
* @param packet A MediaPipe packet that contains a vector of packets.
*/
public static List<Packet> getVectorOfPackets(final Packet packet) {
long[] handles = nativeGetVectorPackets(packet.getNativeHandle());
List<Packet> packets = new ArrayList<>(handles.length);
for (long handle : handles) {
packets.add(Packet.create(handle));
}
return packets;
}
public static short getInt16(final Packet packet) {
return nativeGetInt16(packet.getNativeHandle());
}
public static int getInt32(final Packet packet) {
return nativeGetInt32(packet.getNativeHandle());
}
public static long getInt64(final Packet packet) {
return nativeGetInt64(packet.getNativeHandle());
}
public static float getFloat32(final Packet packet) {
return nativeGetFloat32(packet.getNativeHandle());
}
public static double getFloat64(final Packet packet) {
return nativeGetFloat64(packet.getNativeHandle());
}
public static boolean getBool(final Packet packet) {
return nativeGetBool(packet.getNativeHandle());
}
public static String getString(final Packet packet) {
return nativeGetString(packet.getNativeHandle());
}
public static byte[] getBytes(final Packet packet) {
return nativeGetBytes(packet.getNativeHandle());
}
public static byte[] getProtoBytes(final Packet packet) {
return nativeGetProtoBytes(packet.getNativeHandle());
}
public static short[] getInt16Vector(final Packet packet) {
return nativeGetInt16Vector(packet.getNativeHandle());
}
public static int[] getInt32Vector(final Packet packet) {
return nativeGetInt32Vector(packet.getNativeHandle());
}
public static long[] getInt64Vector(final Packet packet) {
return nativeGetInt64Vector(packet.getNativeHandle());
}
public static float[] getFloat32Vector(final Packet packet) {
return nativeGetFloat32Vector(packet.getNativeHandle());
}
public static double[] getFloat64Vector(final Packet packet) {
return nativeGetFloat64Vector(packet.getNativeHandle());
}
public static int getImageWidth(final Packet packet) {
return nativeGetImageWidth(packet.getNativeHandle());
}
public static int getImageHeight(final Packet packet) {
return nativeGetImageHeight(packet.getNativeHandle());
}
/**
* Fills the given ByteBuffer with the native image data. It assumes the image stores pixels
* contiguously, and returns false if this assumption does not hold.
*
* <p>Note: this function does not assume the pixel format.
*
* <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
*/
public static boolean getImageData(final Packet packet, ByteBuffer buffer) {
return nativeGetImageData(packet.getNativeHandle(), buffer);
}
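// Illustrative sketch, not part of this file: copying an RGB ImageFrame into
// a direct buffer (three bytes per pixel is an assumption about the format):
//
//   int width = PacketGetter.getImageWidth(packet);
//   int height = PacketGetter.getImageHeight(packet);
//   ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3);
//   boolean ok = PacketGetter.getImageData(packet, buffer);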
/**
* Converts an RGB mediapipe image frame packet to an RGBA Byte buffer.
*
* <p>Use {@link ByteBuffer#allocateDirect} when allocating the buffer.
*/
public static boolean getRgbaFromRgb(final Packet packet, ByteBuffer buffer) {
return nativeGetRgbaFromRgb(packet.getNativeHandle(), buffer);
}
/**
* Converts the audio matrix data back into byte data.
*
* <p>The matrix is in column major order.
*/
public static byte[] getAudioByteData(final Packet packet) {
return nativeGetAudioData(packet.getNativeHandle());
}
/**
* Audio data is in MediaPipe Matrix format.
*
* @return the number of channels in the data.
*/
public static int getAudioDataNumChannels(final Packet packet) {
return nativeGetMatrixRows(packet.getNativeHandle());
}
/**
* Audio data is in MediaPipe Matrix format.
*
* @return the number of samples in the data.
*/
public static int getAudioDataNumSamples(final Packet packet) {
return nativeGetMatrixCols(packet.getNativeHandle());
}
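// Illustrative sketch, not part of this file: recovering the byte data and
// its shape from an audio matrix packet:
//
//   byte[] audioBytes = PacketGetter.getAudioByteData(packet);
//   int numChannels = PacketGetter.getAudioDataNumChannels(packet);
//   int numSamples = PacketGetter.getAudioDataNumSamples(packet);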
/**
* In addition to the data packet, mediapipe currently also has a separate audio header: {@code
* mediapipe::TimeSeriesHeader}.
*
* @return the number of channels in the header packet.
*/
public static int getTimeSeriesHeaderNumChannels(final Packet packet) {
return nativeGetTimeSeriesHeaderNumChannels(packet.getNativeHandle());
}
/**
* In addition to the data packet, mediapipe currently also has a separate audio header: {@code
* mediapipe::TimeSeriesHeader}.
*
* @return the sampling rate in the header packet.
*/
public static double getTimeSeriesHeaderSampleRate(final Packet packet) {
return nativeGetTimeSeriesHeaderSampleRate(packet.getNativeHandle());
}
/** Gets the width in video header packet. */
public static int getVideoHeaderWidth(final Packet packet) {
return nativeGetVideoHeaderWidth(packet.getNativeHandle());
}
/** Gets the height in video header packet. */
public static int getVideoHeaderHeight(final Packet packet) {
return nativeGetVideoHeaderHeight(packet.getNativeHandle());
}
/**
* Returns the float array data of the mediapipe Matrix.
*
* <p>Underlying packet stores the matrix as {@code ::mediapipe::Matrix}.
*/
public static float[] getMatrixData(final Packet packet) {
return nativeGetMatrixData(packet.getNativeHandle());
}
public static int getMatrixRows(final Packet packet) {
return nativeGetMatrixRows(packet.getNativeHandle());
}
public static int getMatrixCols(final Packet packet) {
return nativeGetMatrixCols(packet.getNativeHandle());
}
/**
* Returns the GL texture name of the mediapipe::GpuBuffer.
*
* @deprecated use {@link #getTextureFrame} instead.
*/
@Deprecated
public static int getGpuBufferName(final Packet packet) {
return nativeGetGpuBufferName(packet.getNativeHandle());
}
/**
* Returns a {@link GraphTextureFrame} referencing a C++ mediapipe::GpuBuffer.
*
* <p>Note: in order for the application to be able to use the texture, its GL context must be
* linked with MediaPipe's. This is ensured by calling {@link Graph#createGlRunner(String,long)}
* with the native handle to the application's GL context as the second argument.
*/
public static GraphTextureFrame getTextureFrame(final Packet packet) {
return new GraphTextureFrame(
nativeGetGpuBuffer(packet.getNativeHandle()), packet.getTimestamp());
}
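// Illustrative sketch, not part of this file: consuming a GPU packet inside
// an output stream callback and releasing the frame when done:
//
//   GraphTextureFrame frame = PacketGetter.getTextureFrame(packet);
//   int textureName = frame.getTextureName();
//   // ... render with textureName on a GL context shared with MediaPipe ...
//   frame.release();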
private static native long nativeGetPacketFromReference(long nativePacketHandle);
private static native long[] nativeGetPairPackets(long nativePacketHandle);
private static native long[] nativeGetVectorPackets(long nativePacketHandle);
private static native short nativeGetInt16(long nativePacketHandle);
private static native int nativeGetInt32(long nativePacketHandle);
private static native long nativeGetInt64(long nativePacketHandle);
private static native float nativeGetFloat32(long nativePacketHandle);
private static native double nativeGetFloat64(long nativePacketHandle);
private static native boolean nativeGetBool(long nativePacketHandle);
private static native String nativeGetString(long nativePacketHandle);
private static native byte[] nativeGetBytes(long nativePacketHandle);
private static native byte[] nativeGetProtoBytes(long nativePacketHandle);
private static native short[] nativeGetInt16Vector(long nativePacketHandle);
private static native int[] nativeGetInt32Vector(long nativePacketHandle);
private static native long[] nativeGetInt64Vector(long nativePacketHandle);
private static native float[] nativeGetFloat32Vector(long nativePacketHandle);
private static native double[] nativeGetFloat64Vector(long nativePacketHandle);
private static native int nativeGetImageWidth(long nativePacketHandle);
private static native int nativeGetImageHeight(long nativePacketHandle);
private static native boolean nativeGetImageData(long nativePacketHandle, ByteBuffer buffer);
private static native boolean nativeGetRgbaFromRgb(long nativePacketHandle, ByteBuffer buffer);
// Retrieves the values that are in the VideoHeader.
private static native int nativeGetVideoHeaderWidth(long nativepackethandle);
private static native int nativeGetVideoHeaderHeight(long nativepackethandle);
// Retrieves the values that are in the mediapipe::TimeSeriesHeader.
private static native int nativeGetTimeSeriesHeaderNumChannels(long nativepackethandle);
private static native double nativeGetTimeSeriesHeaderSampleRate(long nativepackethandle);
// Audio data in MediaPipe currently uses the MediaPipe Matrix format.
private static native byte[] nativeGetAudioData(long nativePacketHandle);
// Native helper functions to access the MediaPipe Matrix data.
private static native float[] nativeGetMatrixData(long nativePacketHandle);
private static native int nativeGetMatrixRows(long nativePacketHandle);
private static native int nativeGetMatrixCols(long nativePacketHandle);
private static native int nativeGetGpuBufferName(long nativePacketHandle);
private static native long nativeGetGpuBuffer(long nativePacketHandle);
private PacketGetter() {}
}

View File

@ -0,0 +1,20 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/** Interface for MediaPipe callback with packet and packet header. */
public interface PacketWithHeaderCallback {
public void process(Packet packet, Packet packetHeader);
}

View File

@ -0,0 +1,79 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
import javax.annotation.Nullable;
/**
* Outputs a MediaPipe video stream to an {@link android.opengl.EGLSurface}.
*
* <p>Should be created using {@link Graph#addEglSurfaceOutput}.
*/
public class SurfaceOutput {
private Packet surfaceHolderPacket;
private Graph mediapipeGraph;
SurfaceOutput(Graph context, Packet holderPacket) {
mediapipeGraph = context;
surfaceHolderPacket = holderPacket;
}
/**
* Sets vertical flipping of the output surface, useful for conversion between coordinate systems
* with top-left vs. bottom-left origins. This should be called before {@link
* #setSurface(Object)} or {@link #setEglSurface(long)}.
*/
public void setFlipY(boolean flip) {
nativeSetFlipY(surfaceHolderPacket.getNativeHandle(), flip);
}
/**
* Connects an Android {@link Surface} to an output.
*
* <p>This creates the requisite {@link EGLSurface} internally. If one has already been created
* for this Surface outside of MediaPipe, the call will fail.
*
* <p>Note that a given Surface can only be connected to one output. If you wish to move it to a
* different output, first call {@code setSurface(null)} on the old output.
*
* @param surface The surface to connect. Can be {@code null}.
*/
public void setSurface(@Nullable Object surface) {
nativeSetSurface(
mediapipeGraph.getNativeHandle(), surfaceHolderPacket.getNativeHandle(), surface);
}
/**
* Connects an EGL surface to an output.
*
* <p>NOTE: The surface needs to be compatible with the GL context used by MediaPipe. In practice
* this means the EGL context that created the surface should use the same config as used by the
* MediaPipe GL context, otherwise the surface sink calculator will fail with {@code
* EGL_BAD_MATCH}.
*
* @param nativeEglSurface Native handle to the egl surface.
*/
public void setEglSurface(long nativeEglSurface) {
nativeSetEglSurface(
mediapipeGraph.getNativeHandle(), surfaceHolderPacket.getNativeHandle(), nativeEglSurface);
}
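// Illustrative usage sketch, not part of this file; "output_video" is a
// hypothetical stream name, and the addEglSurfaceOutput signature is assumed
// from the class Javadoc above:
//
//   SurfaceOutput output = graph.addEglSurfaceOutput("output_video");
//   output.setFlipY(true);       // must precede attaching a surface
//   output.setSurface(surface);  // an android.view.Surface
//   // ...
//   output.setSurface(null);     // detach before connecting it elsewhere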
private native void nativeSetFlipY(long nativePacket, boolean flip);
private native void nativeSetSurface(
long nativeContext, long nativePacket, Object surface);
private native void nativeSetEglSurface(
long nativeContext, long nativePacket, long nativeEglSurface);
}

View File

@ -0,0 +1,62 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/**
* Interface for a video frame that can be accessed as a texture.
*
* <p>This interface defines a producer/consumer relationship between the component that originates
* the TextureFrame and the component that receives it. The consumer <b>must</b> call {@link
* #release()} when it is done using the frame. This gives the producer the opportunity to recycle
* the resource.
*
* <p>When your application sends a TextureFrame into a MediaPipe graph, the application is the
* producer and MediaPipe is the consumer. MediaPipe will call the release() method when all copies
* of the packet holding the texture have been destroyed.
*
* <p>When MediaPipe sends a TextureFrame to the application, MediaPipe is the producer and the
* application is the consumer. The application should call the release() method.
*
* <p>You can also send a TextureFrame from a component of your application to another. In this
* case, the receiving component is the consumer, and should call release(). This can be useful, for
* instance, if your application requires a "raw" mode where frames are sent directly from the video
* source to the renderer, bypassing MediaPipe.
*/
public interface TextureFrame extends TextureReleaseCallback {
/** The OpenGL name of the texture. */
int getTextureName();
/** Width of the frame in pixels. */
int getWidth();
/** Height of the frame in pixels. */
int getHeight();
/** The presentation time of the frame in microseconds. */
long getTimestamp();
/**
* The consumer that receives this TextureFrame must call this method to inform the provider that
* it is done with it.
*/
void release();
/**
* If this texture is provided to MediaPipe, this method will be called when it is released. The
* {@link GlSyncToken} can be used to wait for the GPU to be entirely done reading the texture.
*/
@Override
void release(GlSyncToken syncToken);
}

View File

@ -0,0 +1,27 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.framework;
/**
* A callback that gets invoked when a texture is no longer in use.
*/
public interface TextureReleaseCallback {
/**
* Called when the texture has been released. The sync token can be used to ensure that the GPU is
* done reading from it. Implementations of this interface should release the token once they are
* done with it.
*/
void release(GlSyncToken syncToken);
}

View File

@ -0,0 +1,143 @@
# Copyright 2019 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
licenses(["notice"]) # Apache 2.0
package(
default_visibility = ["//visibility:public"],
features = ["no_layering_check"],
)
alias(
name = "mediapipe_android_framework_jni",
actual = ":mediapipe_framework_jni",
)
cc_library(
name = "mediapipe_framework_jni",
srcs = [
"compat_jni.cc",
"graph.cc",
"graph_jni.cc",
"graph_service_jni.cc",
"packet_context_jni.cc",
"packet_creator_jni.cc",
"packet_getter_jni.cc",
"graph_profiler_jni.cc",
] + select({
"//conditions:default": [],
"//mediapipe:android": [
"android_asset_util_jni.cc",
"android_packet_creator_jni.cc",
],
}) + select({
"//conditions:default": [
"graph_gl_sync_token.cc",
"graph_texture_frame_jni.cc",
"surface_output_jni.cc",
],
"//mediapipe/gpu:disable_gpu": [],
}),
hdrs = [
"colorspace.h",
"compat_jni.h",
"graph.h",
"graph_jni.h",
"graph_service_jni.h",
"packet_context_jni.h",
"packet_creator_jni.h",
"packet_getter_jni.h",
"graph_profiler_jni.h",
] + select({
"//conditions:default": [],
"//mediapipe:android": [
"android_asset_util_jni.h",
"android_packet_creator_jni.h",
],
}) + select({
"//conditions:default": [
"graph_gl_sync_token.h",
"graph_texture_frame_jni.h",
"surface_output_jni.h",
],
"//mediapipe/gpu:disable_gpu": [],
}),
linkopts = select({
"//conditions:default": [],
"//mediapipe:android": [
"-ljnigraphics",
"-lEGL", # This is needed by compat_jni even if GPU is disabled.
],
}),
visibility = ["//visibility:public"],
deps = [
":jni_util",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:calculator_profile_cc_proto",
"//mediapipe/framework/tool:calculator_graph_template_cc_proto",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework/formats:matrix_data_cc_proto",
"//mediapipe/framework/formats:time_series_header_cc_proto",
"@com_google_absl//absl/strings",
"@com_google_absl//absl/synchronization",
"@eigen_archive//:eigen",
"//mediapipe/framework:camera_intrinsics",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:matrix",
"//mediapipe/framework/formats:video_stream_header",
"//mediapipe/framework/stream_handler:fixed_size_input_stream_handler",
"//mediapipe/framework/tool:name_util",
"//mediapipe/framework/tool:executor_util",
"//mediapipe/framework/port:core_proto",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:threadpool",
"//mediapipe/framework/port:singleton",
"//mediapipe/framework/port:status",
] + select({
"//conditions:default": [
"//mediapipe/framework/port:file_helpers",
],
"//mediapipe:android": [
"//mediapipe/util/android/file/base",
"//mediapipe/util/android:asset_manager_util",
],
}) + select({
"//conditions:default": [
"//mediapipe/gpu:gl_quad_renderer",
"//mediapipe/gpu:gl_calculator_helper",
"//mediapipe/gpu:gl_surface_sink_calculator",
"//mediapipe/gpu:gpu_shared_data_internal",
"//mediapipe/gpu:graph_support",
],
"//mediapipe/gpu:disable_gpu": [
"//mediapipe/gpu:gpu_shared_data_internal",
],
}),
alwayslink = 1,
)
cc_library(
name = "jni_util",
    srcs = ["jni_util.cc"],
    hdrs = ["jni_util.h"],
deps = [
"@com_google_absl//absl/synchronization",
"//mediapipe/framework/port:logging",
] + select({
"//conditions:default": [
],
"//mediapipe:android": [
],
}),
)

View File

@@ -0,0 +1,33 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/android_asset_util_jni.h"
#include <memory>
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/framework/port/singleton.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
#include "mediapipe/util/android/asset_manager_util.h"
JNIEXPORT jboolean JNICALL ANDROID_ASSET_UTIL_METHOD(
nativeInitializeAssetManager)(JNIEnv* env, jclass clz,
jobject android_context,
jstring cache_dir_path) {
mediapipe::AssetManager* asset_manager =
Singleton<mediapipe::AssetManager>::get();
return asset_manager->InitializeFromActivity(
env, android_context,
mediapipe::android::JStringToStdString(env, cache_dir_path));
}

View File

@@ -0,0 +1,36 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_ASSET_UTIL_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_ASSET_UTIL_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define ANDROID_ASSET_UTIL_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_AndroidAssetUtil_##METHOD_NAME
JNIEXPORT jboolean JNICALL ANDROID_ASSET_UTIL_METHOD(
nativeInitializeAssetManager)(JNIEnv* env, jclass clz,
jobject android_context,
jstring cache_dir_path);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_ASSET_UTIL_JNI_H_

View File

@@ -0,0 +1,117 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.h"
#include <android/bitmap.h>
#include <cstring>
#include <memory>
#include "absl/memory/memory.h"
#include "mediapipe/framework/formats/image_format.pb.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
namespace {
// Creates a new internal::PacketWithContext object, and returns the native
// handle.
int64_t CreatePacketWithContext(jlong context,
const mediapipe::Packet& packet) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
return mediapipe_graph->WrapPacketIntoContext(packet);
}
} // namespace
JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD(
nativeCreateRgbImageFrame)(JNIEnv* env, jobject thiz, jlong context,
jobject bitmap) {
AndroidBitmapInfo info;
int result = AndroidBitmap_getInfo(env, bitmap, &info);
if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " << result;
return 0L;
}
if (info.stride != info.width * 4) {
LOG(ERROR) << "Bitmap stride: " << info.stride
<< "is not equal to 4 times bitmap width: " << info.width;
return 0L;
}
auto image_frame = absl::make_unique<::mediapipe::ImageFrame>(
mediapipe::ImageFormat::SRGB, info.width, info.height,
::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary);
void* pixel_addr = nullptr;
result = AndroidBitmap_lockPixels(env, bitmap, &pixel_addr);
if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
LOG(ERROR) << "AndroidBitmap_lockPixels() failed with result code "
<< result;
return 0L;
}
const uint8_t* rgba_data = static_cast<uint8_t*>(pixel_addr);
mediapipe::android::RgbaToRgb(rgba_data, info.stride, info.width, info.height,
image_frame->MutablePixelData(),
image_frame->WidthStep());
result = AndroidBitmap_unlockPixels(env, bitmap);
if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
LOG(ERROR) << "AndroidBitmap_unlockPixels() failed with result code "
<< result;
return 0L;
}
mediapipe::Packet packet = mediapipe::Adopt(image_frame.release());
return CreatePacketWithContext(context, packet);
}
JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD(
nativeCreateRgbaImageFrame)(JNIEnv* env, jobject thiz, jlong context,
jobject bitmap) {
AndroidBitmapInfo info;
int result = AndroidBitmap_getInfo(env, bitmap, &info);
if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " << result;
return 0L;
}
auto image_frame = absl::make_unique<::mediapipe::ImageFrame>(
mediapipe::ImageFormat::SRGBA, info.width, info.height,
::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary);
int64_t buffer_size = info.stride * info.height;
if (buffer_size != image_frame->PixelDataSize()) {
LOG(ERROR) << "Bitmap stride: " << info.stride
<< " times bitmap height: " << info.height
<< " is not equal to the expected size: "
<< image_frame->PixelDataSize();
return 0L;
}
void* pixel_addr = nullptr;
result = AndroidBitmap_lockPixels(env, bitmap, &pixel_addr);
if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
LOG(ERROR) << "AndroidBitmap_lockPixels() failed with result code "
<< result;
return 0L;
}
std::memcpy(image_frame->MutablePixelData(), pixel_addr,
image_frame->PixelDataSize());
result = AndroidBitmap_unlockPixels(env, bitmap);
if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
LOG(ERROR) << "AndroidBitmap_unlockPixels() failed with result code "
<< result;
return 0L;
}
mediapipe::Packet packet = mediapipe::Adopt(image_frame.release());
return CreatePacketWithContext(context, packet);
}
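
Stripped of the JNI and bitmap plumbing, both methods above reduce to the same three steps: allocate an ImageFrame, copy the pixels in, and adopt the frame into a Packet. A minimal sketch, where width, height, and rgba_pixels are assumed stand-ins for values read from a locked bitmap:

  auto image_frame = absl::make_unique<::mediapipe::ImageFrame>(
      mediapipe::ImageFormat::SRGBA, width, height,
      ::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary);
  // Only valid when the source layout matches the frame's, as checked above.
  std::memcpy(image_frame->MutablePixelData(), rgba_pixels,
              image_frame->PixelDataSize());
  // Adopt() transfers ownership of the frame into a reference-counted Packet.
  mediapipe::Packet packet = mediapipe::Adopt(image_frame.release());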

View File

@@ -0,0 +1,39 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_PACKET_CREATOR_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_PACKET_CREATOR_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define ANDROID_PACKET_CREATOR_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_AndroidPacketCreator_##METHOD_NAME
JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD(
nativeCreateRgbImageFrame)(JNIEnv* env, jobject thiz, jlong context,
jobject bitmap);
JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD(
nativeCreateRgbaImageFrame)(JNIEnv* env, jobject thiz, jlong context,
jobject bitmap);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_ANDROID_PACKET_CREATOR_JNI_H_

View File

@@ -0,0 +1,60 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COLORSPACE_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COLORSPACE_H_
#include <cstdint>
namespace mediapipe {
namespace android {
// TODO: Switch to a more efficient implementation, e.g. Halide, later.
// Converts an RGBA image to RGB.
inline void RgbaToRgb(const uint8_t* rgba_img, int rgba_width_step, int width,
int height, uint8_t* rgb_img, int rgb_width_step) {
for (int y = 0; y < height; ++y) {
const auto* rgba = rgba_img + y * rgba_width_step;
auto* rgb = rgb_img + y * rgb_width_step;
for (int x = 0; x < width; ++x) {
*rgb = *rgba;
*(rgb + 1) = *(rgba + 1);
*(rgb + 2) = *(rgba + 2);
rgb += 3;
rgba += 4;
}
}
}
// Converts an RGB image to RGBA.
inline void RgbToRgba(const uint8_t* rgb_img, int rgb_width_step, int width,
int height, uint8_t* rgba_img, int rgba_width_step,
uint8_t alpha) {
for (int y = 0; y < height; ++y) {
const auto* rgb = rgb_img + y * rgb_width_step;
auto* rgba = rgba_img + y * rgba_width_step;
for (int x = 0; x < width; ++x) {
*rgba = *rgb;
*(rgba + 1) = *(rgb + 1);
*(rgba + 2) = *(rgb + 2);
*(rgba + 3) = alpha;
rgb += 3;
rgba += 4;
}
}
}
} // namespace android
} // namespace mediapipe
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COLORSPACE_H_
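
A minimal usage sketch for the two helpers above, assuming tightly packed buffers (row stride equals width times bytes per pixel); real callers pass whatever strides their image source reports:

#include <cstdint>
#include <vector>

#include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h"

int main() {
  constexpr int kWidth = 2;
  constexpr int kHeight = 2;
  // Tightly packed strides: 4 bytes per RGBA pixel, 3 bytes per RGB pixel.
  std::vector<uint8_t> rgba(kWidth * kHeight * 4, 0x80);
  std::vector<uint8_t> rgb(kWidth * kHeight * 3);
  mediapipe::android::RgbaToRgb(rgba.data(), kWidth * 4, kWidth, kHeight,
                                rgb.data(), kWidth * 3);
  // Round-trip back to RGBA, filling the alpha channel with 0xFF.
  std::vector<uint8_t> rgba_again(kWidth * kHeight * 4);
  mediapipe::android::RgbToRgba(rgb.data(), kWidth * 3, kWidth, kHeight,
                                rgba_again.data(), kWidth * 4, 0xFF);
  return 0;
}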

View File

@@ -0,0 +1,27 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/compat_jni.h"
#include <EGL/egl.h>
JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLContext)(JNIEnv* env,
jclass clz) {
return reinterpret_cast<jlong>(eglGetCurrentContext());
}
JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLSurface)(
JNIEnv* env, jclass clz, jint readdraw) {
return reinterpret_cast<jlong>(eglGetCurrentSurface(readdraw));
}
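
A minimal sketch of how these handles are meant to flow, under the assumption that the caller is on a thread with a current EGL context: the jlong captured here is later handed back to native code such as Graph::SetParentGlContext (defined in graph.cc below), which casts it back to an EGLContext for texture sharing:

// On the GL thread: capture the current context as an opaque handle.
jlong java_gl_context = reinterpret_cast<jlong>(eglGetCurrentContext());
// Later, while configuring the graph: share textures with that context.
mediapipe::android::Graph graph;
::mediapipe::Status status = graph.SetParentGlContext(java_gl_context);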

View File

@@ -0,0 +1,37 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COMPAT_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COMPAT_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define COMPAT_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_Compat_##METHOD_NAME
JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLContext)(JNIEnv* env,
jclass clz);
JNIEXPORT jlong JNICALL COMPAT_METHOD(getCurrentNativeEGLSurface)(
JNIEnv* env, jclass clz, jint readdraw);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_COMPAT_JNI_H_

View File

@@ -0,0 +1,600 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
#include <pthread.h>
#include <vector>
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/synchronization/mutex.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/canonical_errors.h"
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/framework/port/proto_ns.h"
#include "mediapipe/framework/port/status.h"
#include "mediapipe/framework/port/threadpool.h"
#include "mediapipe/framework/tool/executor_util.h"
#include "mediapipe/framework/tool/name_util.h"
#include "mediapipe/gpu/gpu_shared_data_internal.h"
#include "mediapipe/gpu/graph_support.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/packet_context_jni.h"
#ifdef __ANDROID__
#include "mediapipe/util/android/file/base/helpers.h"
#else
#include "mediapipe/framework/port/file_helpers.h"
#endif // __ANDROID__
#ifndef MEDIAPIPE_DISABLE_GPU
#include "mediapipe/gpu/egl_surface_holder.h"
#endif // !defined(MEDIAPIPE_DISABLE_GPU)
namespace mediapipe {
namespace android {
namespace internal {
// PacketWithContext is the native counterpart of the Java Packet.
class PacketWithContext {
public:
PacketWithContext(Graph* context, const Packet& packet)
: context_(context), packet_(packet) {}
~PacketWithContext() {}
Graph* GetContext() { return context_; }
Packet& packet() { return packet_; }
private:
Graph* context_;
Packet packet_;
};
// A callback handler that wraps the java callback, and submits it for
// execution through Graph.
class CallbackHandler {
public:
CallbackHandler(Graph* context, jobject callback)
: context_(context), java_callback_(callback) {}
~CallbackHandler() {
// The jobject global reference is managed by the Graph directly.
// So no-op here.
if (java_callback_) {
LOG(ERROR) << "Java callback global reference is not released.";
}
}
void PacketCallback(const Packet& packet) {
context_->CallbackToJava(mediapipe::java::GetJNIEnv(), java_callback_,
packet);
}
void PacketWithHeaderCallback(const Packet& packet, const Packet& header) {
context_->CallbackToJava(mediapipe::java::GetJNIEnv(), java_callback_,
packet, header);
}
std::function<void(const Packet&)> CreateCallback() {
return std::bind(&CallbackHandler::PacketCallback, this,
std::placeholders::_1);
}
std::function<void(const Packet&, const Packet&)> CreateCallbackWithHeader() {
return std::bind(&CallbackHandler::PacketWithHeaderCallback, this,
std::placeholders::_1, std::placeholders::_2);
}
// Releases the global reference to the java callback object.
// This is called by the Graph, since releasing of a jni object
// requires JNIEnv object that we can not keep a copy of.
void ReleaseCallback(JNIEnv* env) {
env->DeleteGlobalRef(java_callback_);
java_callback_ = nullptr;
}
private:
Graph* context_;
// java callback object
jobject java_callback_;
};
} // namespace internal
Graph::Graph()
: executor_stack_size_increased_(false), global_java_packet_cls_(nullptr) {}
Graph::~Graph() {
if (running_graph_) {
running_graph_->Cancel();
running_graph_->WaitUntilDone().IgnoreError();
}
// Cleans up the jni objects.
JNIEnv* env = mediapipe::java::GetJNIEnv();
if (env == nullptr) {
LOG(ERROR) << "Can't attach to java thread, no jni clean up performed.";
return;
}
for (const auto& handler : callback_handlers_) {
handler->ReleaseCallback(env);
}
if (global_java_packet_cls_) {
env->DeleteGlobalRef(global_java_packet_cls_);
global_java_packet_cls_ = nullptr;
}
}
int64_t Graph::WrapPacketIntoContext(const Packet& packet) {
absl::MutexLock lock(&all_packets_mutex_);
auto packet_context = new internal::PacketWithContext(this, packet);
  // Since the value of the all_packets_ map is a unique_ptr, reset it with
  // the newly allocated object.
all_packets_[packet_context].reset(packet_context);
VLOG(2) << "Graph packet reference buffer size: " << all_packets_.size();
return reinterpret_cast<int64_t>(packet_context);
}
// static
Packet Graph::GetPacketFromHandle(int64_t packet_handle) {
internal::PacketWithContext* packet_with_context =
reinterpret_cast<internal::PacketWithContext*>(packet_handle);
return packet_with_context->packet();
}
// static
Graph* Graph::GetContextFromHandle(int64_t packet_handle) {
internal::PacketWithContext* packet_with_context =
reinterpret_cast<internal::PacketWithContext*>(packet_handle);
return packet_with_context->GetContext();
}
// static
bool Graph::RemovePacket(int64_t packet_handle) {
internal::PacketWithContext* packet_with_context =
reinterpret_cast<internal::PacketWithContext*>(packet_handle);
Graph* context = packet_with_context->GetContext();
absl::MutexLock lock(&(context->all_packets_mutex_));
return context->all_packets_.erase(packet_with_context) != 0;
}
void Graph::EnsureMinimumExecutorStackSizeForJava() {}
::mediapipe::Status Graph::AddCallbackHandler(std::string output_stream_name,
jobject java_callback) {
if (!graph_config()) {
return ::mediapipe::InternalError("Graph is not loaded!");
}
std::unique_ptr<internal::CallbackHandler> handler(
new internal::CallbackHandler(this, java_callback));
std::string side_packet_name;
tool::AddCallbackCalculator(output_stream_name, graph_config(),
&side_packet_name,
/* use_std_function = */ true);
EnsureMinimumExecutorStackSizeForJava();
side_packets_callbacks_.emplace(
side_packet_name, MakePacket<std::function<void(const Packet&)>>(
handler->CreateCallback()));
callback_handlers_.emplace_back(std::move(handler));
return ::mediapipe::OkStatus();
}
::mediapipe::Status Graph::AddCallbackWithHeaderHandler(
std::string output_stream_name, jobject java_callback) {
if (!graph_config()) {
return ::mediapipe::InternalError("Graph is not loaded!");
}
std::unique_ptr<internal::CallbackHandler> handler(
new internal::CallbackHandler(this, java_callback));
std::string side_packet_name;
tool::AddCallbackWithHeaderCalculator(output_stream_name, output_stream_name,
graph_config(), &side_packet_name,
/* use_std_function = */ true);
EnsureMinimumExecutorStackSizeForJava();
side_packets_callbacks_.emplace(
side_packet_name,
MakePacket<std::function<void(const Packet&, const Packet&)>>(
handler->CreateCallbackWithHeader()));
callback_handlers_.emplace_back(std::move(handler));
return ::mediapipe::OkStatus();
}
int64_t Graph::AddSurfaceOutput(const std::string& output_stream_name) {
if (!graph_config()) {
LOG(ERROR) << "Graph is not loaded!";
return 0;
}
#ifdef MEDIAPIPE_DISABLE_GPU
LOG(FATAL) << "GPU support has been disabled in this build!";
#else
CalculatorGraphConfig::Node* sink_node = graph_config()->add_node();
sink_node->set_name(::mediapipe::tool::GetUnusedNodeName(
*graph_config(), absl::StrCat("egl_surface_sink_", output_stream_name)));
sink_node->set_calculator("GlSurfaceSinkCalculator");
sink_node->add_input_stream(output_stream_name);
sink_node->add_input_side_packet(
absl::StrCat(kGpuSharedTagName, ":", kGpuSharedSidePacketName));
const std::string input_side_packet_name =
::mediapipe::tool::GetUnusedSidePacketName(
*graph_config(), absl::StrCat(output_stream_name, "_surface"));
sink_node->add_input_side_packet(
absl::StrCat("SURFACE:", input_side_packet_name));
auto it_inserted = output_surface_side_packets_.emplace(
input_side_packet_name,
AdoptAsUniquePtr(new mediapipe::EglSurfaceHolder()));
return WrapPacketIntoContext(it_inserted.first->second);
#endif // defined(MEDIAPIPE_DISABLE_GPU)
}
::mediapipe::Status Graph::LoadBinaryGraph(std::string path_to_graph) {
std::string graph_config_string;
::mediapipe::Status status =
mediapipe::file::GetContents(path_to_graph, &graph_config_string);
if (!status.ok()) {
return status;
}
return LoadBinaryGraph(graph_config_string.c_str(),
graph_config_string.length());
}
::mediapipe::Status Graph::LoadBinaryGraph(const char* data, int size) {
CalculatorGraphConfig graph_config;
if (!graph_config.ParseFromArray(data, size)) {
return ::mediapipe::InvalidArgumentError("Failed to parse the graph");
}
graph_configs_.push_back(graph_config);
return ::mediapipe::OkStatus();
}
::mediapipe::Status Graph::LoadBinaryGraphTemplate(const char* data, int size) {
CalculatorGraphTemplate graph_template;
if (!graph_template.ParseFromArray(data, size)) {
return ::mediapipe::InvalidArgumentError("Failed to parse the graph");
}
graph_templates_.push_back(graph_template);
return ::mediapipe::OkStatus();
}
::mediapipe::Status Graph::SetGraphType(std::string graph_type) {
graph_type_ = graph_type;
return ::mediapipe::OkStatus();
}
::mediapipe::Status Graph::SetGraphOptions(const char* data, int size) {
if (!graph_options_.ParseFromArray(data, size)) {
    return ::mediapipe::InvalidArgumentError(
        "Failed to parse the graph options");
}
return ::mediapipe::OkStatus();
}
CalculatorGraphConfig Graph::GetCalculatorGraphConfig() {
CalculatorGraph temp_graph;
::mediapipe::Status status = InitializeGraph(&temp_graph);
if (!status.ok()) {
LOG(ERROR) << "GetCalculatorGraphConfig failed:\n" << status.message();
}
return temp_graph.Config();
}
void Graph::CallbackToJava(JNIEnv* env, jobject java_callback_obj,
const Packet& packet) {
jclass callback_cls = env->GetObjectClass(java_callback_obj);
jmethodID processMethod = env->GetMethodID(
callback_cls, "process",
absl::StrFormat("(L%s;)V", std::string(Graph::kJavaPacketClassName))
.c_str());
int64_t packet_handle = WrapPacketIntoContext(packet);
// Creates a Java Packet.
VLOG(2) << "Creating java packet preparing for callback to java.";
jobject java_packet =
CreateJavaPacket(env, global_java_packet_cls_, packet_handle);
VLOG(2) << "Calling java callback.";
env->CallVoidMethod(java_callback_obj, processMethod, java_packet);
// release the packet after callback.
RemovePacket(packet_handle);
env->DeleteLocalRef(callback_cls);
env->DeleteLocalRef(java_packet);
VLOG(2) << "Returned from java callback.";
}
void Graph::CallbackToJava(JNIEnv* env, jobject java_callback_obj,
const Packet& packet, const Packet& header_packet) {
jclass callback_cls = env->GetObjectClass(java_callback_obj);
jmethodID processMethod = env->GetMethodID(
callback_cls, "process",
absl::StrFormat("(L%s;L%s;)V", std::string(Graph::kJavaPacketClassName),
std::string(Graph::kJavaPacketClassName))
.c_str());
int64_t packet_handle = WrapPacketIntoContext(packet);
int64_t header_packet_handle = WrapPacketIntoContext(header_packet);
// Creates a Java Packet.
jobject java_packet =
CreateJavaPacket(env, global_java_packet_cls_, packet_handle);
jobject java_header_packet =
CreateJavaPacket(env, global_java_packet_cls_, header_packet_handle);
env->CallVoidMethod(java_callback_obj, processMethod, java_packet,
java_header_packet);
// release the packet after callback.
RemovePacket(packet_handle);
RemovePacket(header_packet_handle);
env->DeleteLocalRef(callback_cls);
env->DeleteLocalRef(java_packet);
env->DeleteLocalRef(java_header_packet);
}
void Graph::SetPacketJavaClass(JNIEnv* env) {
if (global_java_packet_cls_ == nullptr) {
jclass packet_cls =
env->FindClass(mediapipe::android::Graph::kJavaPacketClassName);
global_java_packet_cls_ =
reinterpret_cast<jclass>(env->NewGlobalRef(packet_cls));
}
}
::mediapipe::Status Graph::RunGraphUntilClose(JNIEnv* env) {
  // Get a global reference to the packet class, so it can be used in other
  // native threads for callbacks.
SetPacketJavaClass(env);
  // Running in synchronized mode, so the same Java thread is available
  // throughout the run.
CalculatorGraph calculator_graph;
::mediapipe::Status status = InitializeGraph(&calculator_graph);
if (!status.ok()) {
LOG(ERROR) << status.message();
running_graph_.reset(nullptr);
return status;
}
// TODO: gpu & services set up!
status = calculator_graph.Run(CreateCombinedSidePackets());
LOG(INFO) << "Graph run finished.";
return status;
}
::mediapipe::Status Graph::StartRunningGraph(JNIEnv* env) {
if (running_graph_) {
return ::mediapipe::InternalError("Graph is already running.");
}
  // Get a global reference to the packet class, so it can be used in other
  // native threads for callbacks.
SetPacketJavaClass(env);
  // Running in synchronized mode, so the same Java thread is available
// throughout the run.
running_graph_.reset(new CalculatorGraph());
// Set the mode for adding packets to graph input streams.
running_graph_->SetGraphInputStreamAddMode(graph_input_stream_add_mode_);
if (VLOG_IS_ON(2)) {
LOG(INFO) << "input packet streams:";
for (auto& name : graph_config()->input_stream()) {
LOG(INFO) << name;
}
}
::mediapipe::Status status;
#ifndef MEDIAPIPE_DISABLE_GPU
status = running_graph_->SetGpuResources(gpu_resources_);
if (!status.ok()) {
LOG(ERROR) << status.message();
running_graph_.reset(nullptr);
return status;
}
#endif // !defined(MEDIAPIPE_DISABLE_GPU)
for (const auto& service_packet : service_packets_) {
status = running_graph_->SetServicePacket(*service_packet.first,
service_packet.second);
if (!status.ok()) {
LOG(ERROR) << status.message();
running_graph_.reset(nullptr);
return status;
}
}
status = InitializeGraph(running_graph_.get());
if (!status.ok()) {
LOG(ERROR) << status.message();
running_graph_.reset(nullptr);
return status;
}
LOG(INFO) << "Start running the graph, waiting for inputs.";
status =
running_graph_->StartRun(CreateCombinedSidePackets(), stream_headers_);
if (!status.ok()) {
LOG(ERROR) << status;
running_graph_.reset(nullptr);
return status;
}
return mediapipe::OkStatus();
}
::mediapipe::Status Graph::SetTimestampAndMovePacketToInputStream(
const std::string& stream_name, int64_t packet_handle, int64_t timestamp) {
internal::PacketWithContext* packet_with_context =
reinterpret_cast<internal::PacketWithContext*>(packet_handle);
Packet& packet = packet_with_context->packet();
// Set the timestamp of the packet in-place by calling the rvalue-reference
// version of At here.
packet = std::move(packet).At(Timestamp(timestamp));
// Then std::move it into the input stream.
return AddPacketToInputStream(stream_name, std::move(packet));
}
::mediapipe::Status Graph::AddPacketToInputStream(
const std::string& stream_name, const Packet& packet) {
if (!running_graph_) {
return ::mediapipe::FailedPreconditionError("Graph must be running.");
}
return running_graph_->AddPacketToInputStream(stream_name, packet);
}
::mediapipe::Status Graph::AddPacketToInputStream(
const std::string& stream_name, Packet&& packet) {
if (!running_graph_) {
return ::mediapipe::FailedPreconditionError("Graph must be running.");
}
return running_graph_->AddPacketToInputStream(stream_name, std::move(packet));
}
::mediapipe::Status Graph::CloseInputStream(std::string stream_name) {
if (!running_graph_) {
return ::mediapipe::FailedPreconditionError("Graph must be running.");
}
LOG(INFO) << "Close input stream: " << stream_name;
return running_graph_->CloseInputStream(stream_name);
}
::mediapipe::Status Graph::CloseAllInputStreams() {
LOG(INFO) << "Close all input streams.";
if (!running_graph_) {
return ::mediapipe::FailedPreconditionError("Graph must be running.");
}
return running_graph_->CloseAllInputStreams();
}
::mediapipe::Status Graph::CloseAllPacketSources() {
LOG(INFO) << "Close all input streams.";
if (!running_graph_) {
return ::mediapipe::FailedPreconditionError("Graph must be running.");
}
return running_graph_->CloseAllPacketSources();
}
::mediapipe::Status Graph::WaitUntilDone(JNIEnv* env) {
if (!running_graph_) {
return ::mediapipe::FailedPreconditionError("Graph must be running.");
}
::mediapipe::Status status = running_graph_->WaitUntilDone();
running_graph_.reset(nullptr);
return status;
}
::mediapipe::Status Graph::WaitUntilIdle(JNIEnv* env) {
if (!running_graph_) {
return ::mediapipe::FailedPreconditionError("Graph must be running.");
}
return running_graph_->WaitUntilIdle();
}
void Graph::SetInputSidePacket(const std::string& stream_name,
const Packet& packet) {
side_packets_[stream_name] = packet;
}
void Graph::SetStreamHeader(const std::string& stream_name,
const Packet& packet) {
stream_headers_[stream_name] = packet;
LOG(INFO) << stream_name << " stream header being set.";
}
void Graph::SetGraphInputStreamAddMode(
CalculatorGraph::GraphInputStreamAddMode mode) {
graph_input_stream_add_mode_ = mode;
}
mediapipe::GpuResources* Graph::GetGpuResources() const {
return gpu_resources_.get();
}
::mediapipe::Status Graph::SetParentGlContext(int64 java_gl_context) {
if (gpu_resources_) {
return ::mediapipe::AlreadyExistsError(
"trying to set the parent GL context, but the gpu shared "
"data has already been set up.");
}
#ifdef MEDIAPIPE_DISABLE_GPU
LOG(FATAL) << "GPU support has been disabled in this build!";
#else
gpu_resources_ = mediapipe::GpuResources::Create(
reinterpret_cast<EGLContext>(java_gl_context))
.ValueOrDie();
#endif // defined(MEDIAPIPE_DISABLE_GPU)
return ::mediapipe::OkStatus();
}
void Graph::SetServicePacket(const GraphServiceBase& service, Packet packet) {
service_packets_[&service] = std::move(packet);
}
void Graph::CancelGraph() {
if (running_graph_) {
running_graph_->Cancel();
}
}
std::map<std::string, Packet> Graph::CreateCombinedSidePackets() {
std::map<std::string, Packet> combined_side_packets = side_packets_callbacks_;
combined_side_packets.insert(side_packets_.begin(), side_packets_.end());
combined_side_packets.insert(output_surface_side_packets_.begin(),
output_surface_side_packets_.end());
return combined_side_packets;
}
ProfilingContext* Graph::GetProfilingContext() {
if (running_graph_) {
return running_graph_->profiler();
}
return nullptr;
}
CalculatorGraphConfig* Graph::graph_config() {
// Return the last specified graph config with the required graph_type.
for (auto it = graph_configs_.rbegin(); it != graph_configs_.rend(); ++it) {
if (it->type() == graph_type()) {
return &*it;
}
}
for (auto it = graph_templates_.rbegin(); it != graph_templates_.rend();
++it) {
if (it->mutable_config()->type() == graph_type()) {
return it->mutable_config();
}
}
return nullptr;
}
std::string Graph::graph_type() {
// If a graph-type is specified, that type is used. Otherwise the
// graph-type of the last specified graph config is used.
if (graph_type_ != "<none>") {
return graph_type_;
}
if (!graph_configs_.empty()) {
return graph_configs_.back().type();
}
if (!graph_templates_.empty()) {
return graph_templates_.back().config().type();
}
return "";
}
::mediapipe::Status Graph::InitializeGraph(CalculatorGraph* graph) {
if (graph_configs_.size() == 1 && graph_templates_.empty()) {
return graph->Initialize(*graph_config());
} else {
return graph->Initialize(graph_configs_, graph_templates_, {}, graph_type(),
&graph_options_);
}
}
} // namespace android
} // namespace mediapipe
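
A minimal sketch of the packet-handle lifecycle implemented above; the Java Packet class drives these same calls through JNI:

mediapipe::android::Graph graph;
mediapipe::Packet p = mediapipe::MakePacket<int>(42);
// Wrap: the Graph takes a reference and hands back an opaque handle.
int64_t handle = graph.WrapPacketIntoContext(p);
// Look up: any holder of the handle can recover the Packet.
mediapipe::Packet same = mediapipe::android::Graph::GetPacketFromHandle(handle);
// Remove: drops the Graph's reference; returns false for unknown handles.
bool removed = mediapipe::android::Graph::RemovePacket(handle);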

View File

@@ -0,0 +1,247 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_H_
#include <jni.h>
#include <map>
#include <memory>
#include <queue>
#include <string>
#include <unordered_map>
#include <vector>
#include "mediapipe/framework/calculator_framework.h"
#ifndef MEDIAPIPE_DISABLE_GPU
#include "mediapipe/gpu/gl_calculator_helper.h"
#endif // !defined(MEDIAPIPE_DISABLE_GPU)
#include "absl/synchronization/mutex.h"
#include "mediapipe/gpu/gpu_shared_data_internal.h"
namespace mediapipe {
namespace android {
namespace internal {
class CallbackHandler;
class PacketWithContext;
} // namespace internal
// Graph is used to keep MediaPipe-related native objects in one place,
// so that we can clean them up or query them later.
class Graph {
public:
// The Packet java class name.
static constexpr char const* kJavaPacketClassName =
"com/google/mediapipe/framework/Packet";
Graph();
Graph(const Graph&) = delete;
Graph& operator=(const Graph&) = delete;
~Graph();
// Adds a callback for a given stream name.
::mediapipe::Status AddCallbackHandler(std::string output_stream_name,
jobject java_callback);
// Adds a packet with header callback for a given stream name.
::mediapipe::Status AddCallbackWithHeaderHandler(
std::string output_stream_name, jobject java_callback);
// Loads a binary graph from a file.
::mediapipe::Status LoadBinaryGraph(std::string path_to_graph);
// Loads a binary graph from a buffer.
::mediapipe::Status LoadBinaryGraph(const char* data, int size);
// Loads a binary graph template from a buffer.
::mediapipe::Status LoadBinaryGraphTemplate(const char* data, int size);
// Specifies the CalculatorGraphConfig::type of the top level graph.
::mediapipe::Status SetGraphType(std::string graph_type);
// Specifies options such as template arguments for the graph.
::mediapipe::Status SetGraphOptions(const char* data, int size);
// Returns the expanded calculator graph config.
CalculatorGraphConfig GetCalculatorGraphConfig();
// Runs the graph until it closes.
// Mainly is used for writing tests.
::mediapipe::Status RunGraphUntilClose(JNIEnv* env);
  // The following four functions are used to run the graph in
  // step-by-step mode; the usual call sequence is as follows (see the
  // sketch after this header):
  //   StartRunningGraph
  //   Loop:
  //     AddPacketToInputStream
  //   CloseInputStream
  //   WaitUntilDone
// TODO: We need to have a synchronized wait for each step, i.e.,
// wait until nothing is running and nothing can be scheduled.
//
// Starts running the graph.
::mediapipe::Status StartRunningGraph(JNIEnv* env);
// Closes one input stream.
::mediapipe::Status CloseInputStream(std::string stream_name);
// Closes all the graph input streams.
::mediapipe::Status CloseAllInputStreams();
// Closes all the graph packet sources.
::mediapipe::Status CloseAllPacketSources();
  // Waits until the graph is done.
  ::mediapipe::Status WaitUntilDone(JNIEnv* env);
  // Waits until the graph is idle.
::mediapipe::Status WaitUntilIdle(JNIEnv* env);
// Adds a packet to an input stream.
::mediapipe::Status AddPacketToInputStream(const std::string& stream_name,
const Packet& packet);
// Moves a packet into an input stream.
::mediapipe::Status AddPacketToInputStream(const std::string& stream_name,
Packet&& packet);
// Takes the MediaPipe Packet referenced by the handle, sets its timestamp,
// and then tries to move the Packet into the given input stream.
::mediapipe::Status SetTimestampAndMovePacketToInputStream(
const std::string& stream_name, int64_t packet_handle, int64_t timestamp);
// Sets the mode for adding packets to a graph input stream.
void SetGraphInputStreamAddMode(
CalculatorGraph::GraphInputStreamAddMode mode);
// Adds one input side packet.
void SetInputSidePacket(const std::string& stream_name, const Packet& packet);
// Adds one stream header.
void SetStreamHeader(const std::string& stream_name, const Packet& packet);
// Puts a mediapipe packet into the context for management.
// Returns the handle to the internal PacketWithContext object.
int64_t WrapPacketIntoContext(const Packet& packet);
// Gets the shared mediapipe::GpuResources. Only valid once the graph is
// running.
mediapipe::GpuResources* GetGpuResources() const;
// Adds a surface output for a given stream name.
// Multiple outputs can be attached to the same stream.
// Returns a native packet handle for the mediapipe::EglSurfaceHolder, or 0 in
// case of failure.
int64_t AddSurfaceOutput(const std::string& stream_name);
// Sets a parent GL context to use for texture sharing.
::mediapipe::Status SetParentGlContext(int64 java_gl_context);
// Sets the object for a service.
template <typename T>
void SetServiceObject(const GraphService<T>& service,
std::shared_ptr<T> object) {
SetServicePacket(service,
MakePacket<std::shared_ptr<T>>(std::move(object)));
}
void SetServicePacket(const GraphServiceBase& service, Packet packet);
// Cancels the currently running graph.
void CancelGraph();
// Returns false if not in the context.
static bool RemovePacket(int64_t packet_handle);
// Returns the mediapipe Packet that is referenced by the handle.
static Packet GetPacketFromHandle(int64_t packet_handle);
// Returns the Graph that is managing the packet.
static Graph* GetContextFromHandle(int64_t packet_handle);
// Invokes a Java packet callback.
void CallbackToJava(JNIEnv* env, jobject java_callback_obj,
const Packet& packet);
// Invokes a Java packet callback with header.
void CallbackToJava(JNIEnv* env, jobject java_callback_obj,
const Packet& packet, const Packet& header_packet);
ProfilingContext* GetProfilingContext();
private:
// Increase the graph's default executor's worker thread stack size to run
// Java callbacks. Java's class loader may make deep recursive calls and
// result in a StackOverflowError. The non-portable ThreadPool class in
// thread/threadpool.h uses a default stack size of 64 KB, which is too
// small for Java's class loader. See bug 72414047.
void EnsureMinimumExecutorStackSizeForJava();
void SetPacketJavaClass(JNIEnv* env);
std::map<std::string, Packet> CreateCombinedSidePackets();
// Returns the top-level CalculatorGraphConfig, or nullptr if the top-level
// CalculatorGraphConfig is not yet defined.
CalculatorGraphConfig* graph_config();
// Returns the top-level CalculatorGraphConfig::type, or "" if the top-level
// CalculatorGraphConfig::type is not yet defined.
std::string graph_type();
// Initializes CalculatorGraph |graph| using the loaded graph-configs.
::mediapipe::Status InitializeGraph(CalculatorGraph* graph);
// CalculatorGraphConfigs for the calculator graph and subgraphs.
std::vector<CalculatorGraphConfig> graph_configs_;
// CalculatorGraphTemplates for the calculator graph and subgraphs.
std::vector<CalculatorGraphTemplate> graph_templates_;
// Options such as template arguments for the top-level calculator graph.
Subgraph::SubgraphOptions graph_options_;
// The CalculatorGraphConfig::type of the top-level calculator graph.
std::string graph_type_ = "<none>";
// Used by EnsureMinimumExecutorStackSizeForJava() to ensure that the
// default executor's stack size is increased only once.
bool executor_stack_size_increased_;
  // Holds a global reference to the Packet class, so that it can be
  // used from natively attached threads. This is the suggested workaround
  // for the JNI FindClass issue.
jclass global_java_packet_cls_;
// All mediapipe Packet managed/referenced by the context.
// The map is used for the Java code to be able to look up the Packet
  // based on the handle (pointer).
std::unordered_map<internal::PacketWithContext*,
std::unique_ptr<internal::PacketWithContext>>
all_packets_;
absl::Mutex all_packets_mutex_;
// All callback handlers managed by the context.
std::vector<std::unique_ptr<internal::CallbackHandler>> callback_handlers_;
// mediapipe::GpuResources used by the graph.
// Note: this class does not create a CalculatorGraph until StartRunningGraph
// is called, and we may have to create the mediapipe::GpuResources before
// that time, e.g. before a SurfaceOutput is associated with a Surface.
std::shared_ptr<mediapipe::GpuResources> gpu_resources_;
// Maps surface output names to the side packet used for the associated
// surface.
std::unordered_map<std::string, Packet> output_surface_side_packets_;
// Side packets used for callbacks.
std::map<std::string, Packet> side_packets_callbacks_;
// Side packets set using SetInputSidePacket.
std::map<std::string, Packet> side_packets_;
// Service packets held here before the graph's creation.
std::map<const GraphServiceBase*, Packet> service_packets_;
  // All headers required by the graph input streams.
// Note: header has to be set for the calculators that require it during
// Open().
std::map<std::string, Packet> stream_headers_;
std::unique_ptr<CalculatorGraph> running_graph_;
CalculatorGraph::GraphInputStreamAddMode graph_input_stream_add_mode_ =
CalculatorGraph::GraphInputStreamAddMode::WAIT_TILL_NOT_FULL;
};
} // namespace android
} // namespace mediapipe
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_H_
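
A minimal sketch (error handling written out long-hand) of the step-by-step sequence described in the class comment, assuming a JNIEnv* attached to the current thread and a serialized CalculatorGraphConfig on disk; RunGraphOnce is an illustrative name, not part of the framework:

::mediapipe::Status RunGraphOnce(JNIEnv* env, const std::string& graph_path,
                                 const std::string& input_stream,
                                 const mediapipe::Packet& packet) {
  mediapipe::android::Graph graph;
  ::mediapipe::Status status = graph.LoadBinaryGraph(graph_path);
  if (!status.ok()) return status;
  status = graph.StartRunningGraph(env);
  if (!status.ok()) return status;
  // Loop body: feed as many packets as needed; a single one here.
  status = graph.AddPacketToInputStream(input_stream, packet);
  if (!status.ok()) return status;
  status = graph.CloseInputStream(input_stream);
  if (!status.ok()) return status;
  return graph.WaitUntilDone(env);
}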

View File

@@ -0,0 +1,40 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_gl_sync_token.h"
#include <memory>
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/gpu/gl_context.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeWaitOnCpu)(
JNIEnv* env, jclass cls, jlong syncToken) {
mediapipe::GlSyncToken& token =
*reinterpret_cast<mediapipe::GlSyncToken*>(syncToken);
token->Wait();
}
JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeWaitOnGpu)(
JNIEnv* env, jclass cls, jlong syncToken) {
mediapipe::GlSyncToken& token =
*reinterpret_cast<mediapipe::GlSyncToken*>(syncToken);
token->WaitOnGpu();
}
JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeRelease)(
JNIEnv* env, jclass cls, jlong syncToken) {
delete reinterpret_cast<mediapipe::GlSyncToken*>(syncToken);
}
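
A minimal sketch of the ownership model behind the jlong handles above: each handle is a heap-allocated mediapipe::GlSyncToken (a shared pointer to a GL sync point); token_source() is a hypothetical stand-in for wherever the token originates, such as a TextureFrame release:

mediapipe::GlSyncToken* handle =
    new mediapipe::GlSyncToken(token_source());  // Stored in Java as a jlong.
(*handle)->Wait();       // nativeWaitOnCpu: block this thread on the sync point.
(*handle)->WaitOnGpu();  // nativeWaitOnGpu: make the current GL context wait.
delete handle;           // nativeRelease: drop this reference to the sync point.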

View File

@@ -0,0 +1,42 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_GL_SYNC_TOKEN_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_GL_SYNC_TOKEN_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define GRAPH_GL_SYNC_TOKEN_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_GraphGlSyncToken_##METHOD_NAME
JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeWaitOnCpu)(JNIEnv *,
jclass,
jlong);
JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeWaitOnGpu)(JNIEnv *,
jclass,
jlong);
JNIEXPORT void JNICALL GRAPH_GL_SYNC_TOKEN_METHOD(nativeRelease)(JNIEnv *,
jclass, jlong);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_GL_SYNC_TOKEN_H_

View File

@@ -0,0 +1,379 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_jni.h"
#include <memory>
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/canonical_errors.h"
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
using mediapipe::android::JStringToStdString;
namespace {
mediapipe::Status AddSidePacketsIntoGraph(
mediapipe::android::Graph* mediapipe_graph, JNIEnv* env,
jobjectArray stream_names, jlongArray packets) {
jsize num_side_packets = env->GetArrayLength(stream_names);
if (num_side_packets != env->GetArrayLength(packets)) {
return mediapipe::InvalidArgumentError(
"Number of streams and packets doesn't match!");
}
  // Note, packets_array_ref is really a const jlong* but this clashes with
  // the expectation of ReleaseLongArrayElements below.
jlong* packets_array_ref = env->GetLongArrayElements(packets, nullptr);
for (jsize i = 0; i < num_side_packets; ++i) {
jstring name =
reinterpret_cast<jstring>(env->GetObjectArrayElement(stream_names, i));
mediapipe_graph->SetInputSidePacket(
JStringToStdString(env, name),
mediapipe::android::Graph::GetPacketFromHandle(packets_array_ref[i]));
env->DeleteLocalRef(name);
}
env->ReleaseLongArrayElements(packets, packets_array_ref, JNI_ABORT);
return mediapipe::OkStatus();
}
mediapipe::Status AddStreamHeadersIntoGraph(
mediapipe::android::Graph* mediapipe_graph, JNIEnv* env,
jobjectArray stream_names, jlongArray packets) {
jsize num_headers = env->GetArrayLength(stream_names);
if (num_headers != env->GetArrayLength(packets)) {
return mediapipe::Status(::mediapipe::StatusCode::kFailedPrecondition,
"Number of streams and packets doesn't match!");
}
jlong* packets_array_ref = env->GetLongArrayElements(packets, nullptr);
for (jsize i = 0; i < num_headers; ++i) {
jstring name =
reinterpret_cast<jstring>(env->GetObjectArrayElement(stream_names, i));
mediapipe_graph->SetStreamHeader(
JStringToStdString(env, name),
mediapipe::android::Graph::GetPacketFromHandle(packets_array_ref[i]));
env->DeleteLocalRef(name);
}
env->ReleaseLongArrayElements(packets, packets_array_ref, JNI_ABORT);
return mediapipe::OkStatus();
}
// Creates a java MediaPipeException object for a mediapipe::Status.
jthrowable CreateMediaPipeException(JNIEnv* env, mediapipe::Status status) {
jclass status_cls =
env->FindClass("com/google/mediapipe/framework/MediaPipeException");
jmethodID status_ctr = env->GetMethodID(status_cls, "<init>", "(I[B)V");
int length = status.message().length();
jbyteArray message_bytes = env->NewByteArray(length);
env->SetByteArrayRegion(message_bytes, 0, length,
reinterpret_cast<jbyte*>(const_cast<char*>(
std::string(status.message()).c_str())));
return reinterpret_cast<jthrowable>(
env->NewObject(status_cls, status_ctr, status.code(), message_bytes));
}
// Throws a MediaPipeException for any non-ok mediapipe::Status.
// Note that the exception is thrown after execution returns to Java.
bool ThrowIfError(JNIEnv* env, mediapipe::Status status) {
if (!status.ok()) {
env->Throw(CreateMediaPipeException(env, status));
return true;
}
return false;
}
} // namespace
JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeCreateGraph)(JNIEnv* env,
jobject thiz) {
if (!mediapipe::java::SetJavaVM(env)) {
return 0;
}
return reinterpret_cast<int64_t>(new mediapipe::android::Graph());
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeReleaseGraph)(JNIEnv* env,
jobject thiz,
jlong context) {
delete reinterpret_cast<mediapipe::android::Graph*>(context);
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraph)(JNIEnv* env,
jobject thiz,
jlong context,
jstring path) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
const char* path_ref = env->GetStringUTFChars(path, nullptr);
// Make a copy of the std::string and release the jni reference.
std::string path_to_graph(path_ref);
env->ReleaseStringUTFChars(path, path_ref);
ThrowIfError(env, mediapipe_graph->LoadBinaryGraph(path_to_graph));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraphBytes)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
jbyte* data_ptr = env->GetByteArrayElements(data, nullptr);
int size = env->GetArrayLength(data);
mediapipe::Status status =
mediapipe_graph->LoadBinaryGraph(reinterpret_cast<char*>(data_ptr), size);
env->ReleaseByteArrayElements(data, data_ptr, JNI_ABORT);
ThrowIfError(env, status);
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraphTemplate)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
jbyte* data_ptr = env->GetByteArrayElements(data, nullptr);
int size = env->GetArrayLength(data);
mediapipe::Status status = mediapipe_graph->LoadBinaryGraphTemplate(
reinterpret_cast<char*>(data_ptr), size);
env->ReleaseByteArrayElements(data, data_ptr, JNI_ABORT);
ThrowIfError(env, status);
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphType)(JNIEnv* env,
jobject thiz,
jlong context,
jstring graph_type) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
const char* graph_type_ref = env->GetStringUTFChars(graph_type, nullptr);
// Make a copy of the std::string and release the jni reference.
std::string graph_type_string(graph_type_ref);
env->ReleaseStringUTFChars(graph_type, graph_type_ref);
ThrowIfError(env, mediapipe_graph->SetGraphType(graph_type_string));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphOptions)(JNIEnv* env,
jobject thiz,
jlong context,
jbyteArray data) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
jbyte* data_ptr = env->GetByteArrayElements(data, nullptr);
int size = env->GetArrayLength(data);
mediapipe::Status status =
mediapipe_graph->SetGraphOptions(reinterpret_cast<char*>(data_ptr), size);
env->ReleaseByteArrayElements(data, data_ptr, JNI_ABORT);
ThrowIfError(env, status);
}
JNIEXPORT jbyteArray JNICALL GRAPH_METHOD(nativeGetCalculatorGraphConfig)(
JNIEnv* env, jobject thiz, jlong context) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
auto graph = mediapipe_graph->GetCalculatorGraphConfig();
if (graph.IsInitialized()) {
    int size = graph.ByteSize();
    // Own the serialization buffer so it is freed when this function returns.
    std::unique_ptr<char[]> buffer(new char[size]);
    graph.SerializeToArray(buffer.get(), size);
    jbyteArray byteArray = env->NewByteArray(size);
    env->SetByteArrayRegion(byteArray, 0, size,
                            reinterpret_cast<jbyte*>(buffer.get()));
    return byteArray;
}
return nullptr;
}
JNIEXPORT void JNICALL
GRAPH_METHOD(nativeAddPacketCallback)(JNIEnv* env, jobject thiz, jlong context,
jstring stream_name, jobject callback) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
std::string output_stream_name = JStringToStdString(env, stream_name);
// Create a global reference to the callback object, so that it can
// be accessed later.
jobject global_callback_ref = env->NewGlobalRef(callback);
if (!global_callback_ref) {
ThrowIfError(
env, ::mediapipe::InternalError("Failed to allocate packet callback"));
return;
}
ThrowIfError(env, mediapipe_graph->AddCallbackHandler(output_stream_name,
global_callback_ref));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketWithHeaderCallback)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name,
jobject callback) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
std::string output_stream_name = JStringToStdString(env, stream_name);
// Create a global reference to the callback object, so that it can
// be accessed later.
jobject global_callback_ref = env->NewGlobalRef(callback);
if (!global_callback_ref) {
ThrowIfError(
env, ::mediapipe::InternalError("Failed to allocate packet callback"));
return;
}
ThrowIfError(env, mediapipe_graph->AddCallbackWithHeaderHandler(
output_stream_name, global_callback_ref));
}
JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeAddSurfaceOutput)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
std::string output_stream_name = JStringToStdString(env, stream_name);
return mediapipe_graph->AddSurfaceOutput(output_stream_name);
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeRunGraphUntilClose)(
JNIEnv* env, jobject thiz, jlong context, jobjectArray stream_names,
jlongArray packets) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
if (ThrowIfError(env, AddSidePacketsIntoGraph(mediapipe_graph, env,
stream_names, packets))) {
return;
}
ThrowIfError(env, mediapipe_graph->RunGraphUntilClose(env));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeStartRunningGraph)(
JNIEnv* env, jobject thiz, jlong context, jobjectArray side_packet_names,
jlongArray side_packet_handles, jobjectArray stream_names_with_header,
jlongArray header_handles) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
if (ThrowIfError(
env, AddSidePacketsIntoGraph(mediapipe_graph, env, side_packet_names,
side_packet_handles))) {
return;
}
if (ThrowIfError(env, AddStreamHeadersIntoGraph(mediapipe_graph, env,
stream_names_with_header,
header_handles))) {
return;
}
ThrowIfError(env, mediapipe_graph->StartRunningGraph(env));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketToInputStream)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name, jlong packet,
jlong timestamp) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
// We push in a copy of the current packet at the given timestamp.
ThrowIfError(env,
mediapipe_graph->AddPacketToInputStream(
JStringToStdString(env, stream_name),
mediapipe::android::Graph::GetPacketFromHandle(packet).At(
mediapipe::Timestamp(timestamp))));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeMovePacketToInputStream)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name, jlong packet,
jlong timestamp) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
ThrowIfError(
env, mediapipe_graph->SetTimestampAndMovePacketToInputStream(
JStringToStdString(env, stream_name),
static_cast<int64_t>(packet), static_cast<int64_t>(timestamp)));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphInputStreamBlockingMode)(
JNIEnv* env, jobject thiz, jlong context, jboolean mode) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
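// When mode is true, adding to a full graph input stream blocks the caller
// until there is room (WAIT_TILL_NOT_FULL); when false, the add fails instead
// of blocking if the queue is full (ADD_IF_NOT_FULL).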
if (mode) {
mediapipe_graph->SetGraphInputStreamAddMode(
mediapipe::CalculatorGraph::GraphInputStreamAddMode::
WAIT_TILL_NOT_FULL);
} else {
mediapipe_graph->SetGraphInputStreamAddMode(
mediapipe::CalculatorGraph::GraphInputStreamAddMode::ADD_IF_NOT_FULL);
}
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseInputStream)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
ThrowIfError(env, mediapipe_graph->CloseInputStream(
JStringToStdString(env, stream_name)));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseAllInputStreams)(JNIEnv* env,
jobject thiz,
jlong context) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
ThrowIfError(env, mediapipe_graph->CloseAllInputStreams());
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseAllPacketSources)(
JNIEnv* env, jobject thiz, jlong context) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
ThrowIfError(env, mediapipe_graph->CloseAllPacketSources());
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeWaitUntilGraphDone)(JNIEnv* env,
jobject thiz,
jlong context) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
ThrowIfError(env, mediapipe_graph->WaitUntilDone(env));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeWaitUntilGraphIdle)(JNIEnv* env,
jobject thiz,
jlong context) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
ThrowIfError(env, mediapipe_graph->WaitUntilIdle(env));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeUpdatePacketReference)(
JNIEnv* env, jobject thiz, jlong reference_packet, jlong new_packet) {
auto reference =
mediapipe::android::Graph::GetPacketFromHandle(reference_packet)
.Get<std::unique_ptr<mediapipe::SyncedPacket>>()
.get();
auto new_value = mediapipe::android::Graph::GetPacketFromHandle(new_packet);
reference->UpdatePacket(new_value);
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetParentGlContext)(
JNIEnv* env, jobject thiz, jlong context, jlong javaGlContext) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
ThrowIfError(env, mediapipe_graph->SetParentGlContext(javaGlContext));
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeCancelGraph)(JNIEnv* env,
jobject thiz,
jlong context) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
mediapipe_graph->CancelGraph();
}
JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeGetProfiler)(JNIEnv* env,
jobject thiz,
jlong context) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
return reinterpret_cast<jlong>(mediapipe_graph->GetProfilingContext());
}

View File

@@ -0,0 +1,129 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define GRAPH_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_Graph_##METHOD_NAME
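// For example, GRAPH_METHOD(nativeCreateGraph) expands to
// Java_com_google_mediapipe_framework_Graph_nativeCreateGraph, the symbol the
// JVM resolves when binding the nativeCreateGraph() native method declared on
// com.google.mediapipe.framework.Graph.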
// Creates a native mediapipe context.
JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeCreateGraph)(JNIEnv* env,
jobject thiz);
// Releases a native mediapipe context.
JNIEXPORT void JNICALL GRAPH_METHOD(nativeReleaseGraph)(JNIEnv* env,
jobject thiz,
jlong context);
// Loads a binary mediapipe graph into the context.
JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraph)(JNIEnv* env,
jobject thiz,
jlong context,
jstring path);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraphBytes)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeLoadBinaryGraphTemplate)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphType)(JNIEnv* env,
jobject thiz,
jlong context,
jstring graph_type);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphOptions)(JNIEnv* env,
jobject thiz,
jlong context,
jbyteArray data);
JNIEXPORT jbyteArray JNICALL GRAPH_METHOD(nativeGetCalculatorGraphConfig)(
JNIEnv* env, jobject thiz, jlong context);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketCallback)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name,
jobject callback);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketWithHeaderCallback)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name,
jobject callback);
JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeAddSurfaceOutput)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeRunGraphUntilClose)(
JNIEnv* env, jobject thiz, jlong context, jobjectArray stream_names,
jlongArray packets);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeStartRunningGraph)(
JNIEnv* env, jobject thiz, jlong context, jobjectArray side_packet_names,
jlongArray side_packet_handles, jobjectArray stream_names_with_header,
jlongArray header_handles);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeAddPacketToInputStream)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name, jlong packet,
jlong timestamp);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeMovePacketToInputStream)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name, jlong packet,
jlong timestamp);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetGraphInputStreamBlockingMode)(
JNIEnv* env, jobject thiz, jlong context, jboolean mode);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseInputStream)(
JNIEnv* env, jobject thiz, jlong context, jstring stream_name);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseAllInputStreams)(JNIEnv* env,
jobject thiz,
jlong context);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeCloseAllPacketSources)(JNIEnv* env,
jobject thiz,
jlong context);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeWaitUntilGraphDone)(JNIEnv* env,
jobject thiz,
jlong context);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeWaitUntilGraphIdle)(JNIEnv* env,
jobject thiz,
jlong context);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeUpdatePacketReference)(
JNIEnv* env, jobject thiz, jlong reference_packet, jlong new_packet);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeSetParentGlContext)(
JNIEnv* env, jobject thiz, jlong context, jlong javaGlContext);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeCancelGraph)(JNIEnv* env,
jobject thiz,
jlong context);
JNIEXPORT jlong JNICALL GRAPH_METHOD(nativeGetProfiler)(JNIEnv* env,
jobject thiz,
jlong context);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_JNI_H_

View File

@@ -0,0 +1,72 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_profiler_jni.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/calculator_profile.pb.h"
JNIEXPORT void JNICALL GRAPH_METHOD(nativeReset)(JNIEnv* env, jobject thiz,
jlong handle) {
mediapipe::ProfilingContext* profiling_context =
reinterpret_cast<mediapipe::ProfilingContext*>(handle);
profiling_context->Reset();
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativePause)(JNIEnv* env, jobject thiz,
jlong handle) {
mediapipe::ProfilingContext* profiling_context =
reinterpret_cast<mediapipe::ProfilingContext*>(handle);
profiling_context->Pause();
}
JNIEXPORT void JNICALL GRAPH_METHOD(nativeResume)(JNIEnv* env, jobject thiz,
jlong handle) {
mediapipe::ProfilingContext* profiling_context =
reinterpret_cast<mediapipe::ProfilingContext*>(handle);
profiling_context->Resume();
}
JNIEXPORT jobjectArray JNICALL GRAPH_METHOD(nativeGetCalculatorProfiles)(
JNIEnv* env, jobject thiz, jlong handle) {
mediapipe::ProfilingContext* profiling_context =
reinterpret_cast<mediapipe::ProfilingContext*>(handle);
std::vector<mediapipe::CalculatorProfile> profiles_vec;
if (profiling_context->GetCalculatorProfiles(&profiles_vec) !=
::mediapipe::OkStatus()) {
return nullptr;
}
int num_profiles = profiles_vec.size();
if (num_profiles == 0) {
return nullptr;
}
jobjectArray profiles =
env->NewObjectArray(num_profiles, env->FindClass("[B"), nullptr);
for (int i = 0; i < num_profiles; i++) {
const auto& profile = profiles_vec[i];
int size = profile.ByteSize();
jbyteArray byteArray = env->NewByteArray(size);
jbyte* byteArrayBuffer = env->GetByteArrayElements(byteArray, nullptr);
profile.SerializeToArray(byteArrayBuffer, size);
env->ReleaseByteArrayElements(byteArray, byteArrayBuffer, 0);
env->SetObjectArrayElement(profiles, i, byteArray);
env->DeleteLocalRef(byteArray);
}
return profiles;
}
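// Each element of the array returned above is a single CalculatorProfile
// serialized to bytes; the Java caller is presumably expected to parse each
// byte[] back into the CalculatorProfile proto message.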

View File

@@ -0,0 +1,43 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_PROFILER_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_PROFILER_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define GRAPH_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_GraphProfiler_##METHOD_NAME
JNIEXPORT void JNICALL GRAPH_METHOD(nativeReset)(JNIEnv* env, jobject thiz,
jlong profiling_context);
JNIEXPORT void JNICALL GRAPH_METHOD(nativeResume)(JNIEnv* env, jobject thiz,
jlong profiling_context);
JNIEXPORT void JNICALL GRAPH_METHOD(nativePause)(JNIEnv* env, jobject thiz,
jlong profiling_context);
JNIEXPORT jobjectArray JNICALL GRAPH_METHOD(nativeGetCalculatorProfiles)(
JNIEnv* env, jobject thiz, jlong profiling_context);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_PROFILER_JNI_H_

View File

@@ -0,0 +1,31 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_service_jni.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
namespace mediapipe {
namespace android {
void GraphServiceHelper::SetServicePacket(jlong context_handle,
const GraphServiceBase& service,
Packet packet) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context_handle);
mediapipe_graph->SetServicePacket(service, packet);
}
} // namespace android
} // namespace mediapipe

View File

@@ -0,0 +1,51 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_SERVICE_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_SERVICE_JNI_H_
#include <jni.h>
#include "mediapipe/framework/graph_service.h"
#include "mediapipe/framework/packet.h"
namespace mediapipe {
namespace android {
// Support class for handling graph services in JNI.
// It keeps the context argument opaque and avoids exposing the entire
// Graph to service JNI implementations.
class GraphServiceHelper {
public:
// Call this static method to provide a native service object in response to
// a call to GraphService#installServiceObject in Java.
// The context_handle parameter should be the same as passed to
// installServiceObject.
template <typename T>
static void SetServiceObject(jlong context_handle,
const GraphService<T>& service,
std::shared_ptr<T> object) {
SetServicePacket(context_handle, service,
MakePacket<std::shared_ptr<T>>(std::move(object)));
}
private:
static void SetServicePacket(jlong context_handle,
const GraphServiceBase& service, Packet packet);
};
} // namespace android
} // namespace mediapipe
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_SERVICE_JNI_H_
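// A minimal sketch (not part of MediaPipe) of how a service JNI binding might
// use the helper above; MyService, kMyService, and the Java class name are
// hypothetical placeholders:
//
//   JNIEXPORT void JNICALL
//   Java_com_example_MyServiceBinding_nativeInstallServiceObject(
//       JNIEnv* env, jobject thiz, jlong context_handle) {
//     mediapipe::android::GraphServiceHelper::SetServiceObject(
//         context_handle, kMyService, std::make_shared<MyService>());
//   }
//
// where kMyService is a GraphService<MyService> constant and context_handle
// is the same value passed to GraphService#installServiceObject in Java.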

View File

@@ -0,0 +1,49 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h"
#include "mediapipe/gpu/gl_calculator_helper.h"
#include "mediapipe/gpu/gl_texture_buffer.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
using mediapipe::GlTextureBufferSharedPtr;
JNIEXPORT void JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeReleaseBuffer)(
JNIEnv* env, jobject thiz, jlong nativeHandle) {
GlTextureBufferSharedPtr* buffer =
reinterpret_cast<GlTextureBufferSharedPtr*>(nativeHandle);
delete buffer;
}
JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetTextureName)(
JNIEnv* env, jobject thiz, jlong nativeHandle) {
GlTextureBufferSharedPtr* buffer =
reinterpret_cast<GlTextureBufferSharedPtr*>(nativeHandle);
return (*buffer)->name();
}
JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetWidth)(
JNIEnv* env, jobject thiz, jlong nativeHandle) {
GlTextureBufferSharedPtr* buffer =
reinterpret_cast<GlTextureBufferSharedPtr*>(nativeHandle);
return (*buffer)->width();
}
JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetHeight)(
JNIEnv* env, jobject thiz, jlong nativeHandle) {
GlTextureBufferSharedPtr* buffer =
reinterpret_cast<GlTextureBufferSharedPtr*>(nativeHandle);
return (*buffer)->height();
}

View File

@@ -0,0 +1,44 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_TEXTURE_FRAME_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_TEXTURE_FRAME_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define GRAPH_TEXTURE_FRAME_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_GraphTextureFrame_##METHOD_NAME
// Releases a native mediapipe::GlTextureBufferSharedPtr.
JNIEXPORT void JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeReleaseBuffer)(
JNIEnv* env, jobject thiz, jlong nativeHandle);
JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetTextureName)(
JNIEnv* env, jobject thiz, jlong nativeHandle);
JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetWidth)(
JNIEnv* env, jobject thiz, jlong nativeHandle);
JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetHeight)(
JNIEnv* env, jobject thiz, jlong nativeHandle);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_GRAPH_TEXTURE_FRAME_JNI_H_

View File

@@ -0,0 +1,147 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
#include <pthread.h>
#include "absl/synchronization/mutex.h"
#include "mediapipe/framework/port/logging.h"
namespace {
ABSL_CONST_INIT absl::Mutex g_jvm_mutex(absl::kConstInit);
JavaVM* g_jvm GUARDED_BY(g_jvm_mutex);
class JvmThread {
public:
explicit JvmThread(JavaVM* jvm) {
jvm_ = jvm;
attached_ = false;
jni_env_ = nullptr;
int get_env_stat =
jvm_->GetEnv(reinterpret_cast<void**>(&jni_env_), JNI_VERSION_1_6);
// TODO: report the error back to the Java layer.
switch (get_env_stat) {
case JNI_OK:
break;
case JNI_EDETACHED:
LOG(INFO) << "GetEnv: not attached";
if (jvm_->AttachCurrentThread(
#ifdef __ANDROID__
&jni_env_,
#else
reinterpret_cast<void**>(&jni_env_),
#endif // __ANDROID__
nullptr) != 0) {
LOG(ERROR) << "Failed to attach to java thread.";
break;
}
attached_ = true;
break;
case JNI_EVERSION:
LOG(ERROR) << "GetEnv: jni version not supported.";
break;
default:
LOG(ERROR) << "GetEnv: unknown status.";
break;
}
}
~JvmThread() {
if (attached_) {
jvm_->DetachCurrentThread();
}
}
JNIEnv* GetEnv() const { return jni_env_; }
private:
bool attached_;
JavaVM* jvm_;
JNIEnv* jni_env_;
};
// Since the current Android ABI doesn't support thread_local, we rely on
// pthread functions to detach the Java thread when the native thread exits
// (see: http://developer.android.com/training/articles/perf-jni.html).
static pthread_key_t jvm_thread_key;
static pthread_once_t key_once = PTHREAD_ONCE_INIT;
static void ThreadExitCallback(void* key_value) {
JvmThread* jvm_thread = reinterpret_cast<JvmThread*>(key_value);
// Detach the Java thread when the native thread exits.
LOG(INFO) << "Exiting thread. Detaching thread.";
delete jvm_thread;
}
void MakeKey() { pthread_key_create(&jvm_thread_key, ThreadExitCallback); }
// Returns the global Java VM instance.
JavaVM* GetJavaVM() {
absl::MutexLock lock(&g_jvm_mutex);
return g_jvm;
}
} // namespace
namespace mediapipe {
namespace android {
std::string JStringToStdString(JNIEnv* env, jstring jstr) {
const char* s = env->GetStringUTFChars(jstr, 0);
if (!s) {
return std::string();
}
std::string str(s);
env->ReleaseStringUTFChars(jstr, s);
return str;
}
} // namespace android
namespace java {
bool HasJavaVM() {
absl::MutexLock lock(&g_jvm_mutex);
return g_jvm != nullptr;
}
bool SetJavaVM(JNIEnv* env) {
absl::MutexLock lock(&g_jvm_mutex);
if (!g_jvm) {
if (env->GetJavaVM(&g_jvm) != JNI_OK) {
LOG(ERROR) << "Can not get the Java VM instance!";
g_jvm = nullptr;
return false;
}
}
return true;
}
JNIEnv* GetJNIEnv() {
pthread_once(&key_once, MakeKey);
JvmThread* jvm_thread =
reinterpret_cast<JvmThread*>(pthread_getspecific(jvm_thread_key));
if (jvm_thread == nullptr) {
jvm_thread = new JvmThread(GetJavaVM());
pthread_setspecific(jvm_thread_key, jvm_thread);
}
return jvm_thread->GetEnv();
}
} // namespace java
} // namespace mediapipe
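// Usage sketch (hypothetical worker function, not part of this file): once
// SetJavaVM() has been called from a JNI entry point, any native thread can
// obtain a JNIEnv without managing attach/detach itself:
//
//   void ExampleWorkerThreadBody() {
//     JNIEnv* env = mediapipe::java::GetJNIEnv();  // Attaches on first call.
//     jclass clazz = env->FindClass("java/lang/String");
//     // ... use env; no DetachCurrentThread() is needed, because the
//     // pthread key's ThreadExitCallback detaches at thread exit.
//   }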

View File

@@ -0,0 +1,46 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_JNI_UTIL_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_JNI_UTIL_H_
#include <jni.h>
#include <string>
namespace mediapipe {
namespace android {
std::string JStringToStdString(JNIEnv* env, jstring jstr);
} // namespace android
namespace java {
// Sets the global Java VM instance, if it is not set yet.
// Returns true on success.
bool SetJavaVM(JNIEnv* env);
// Determines if the global Java VM instance is available.
bool HasJavaVM();
// Returns the current JNI environment.
JNIEnv* GetJNIEnv();
} // namespace java
} // namespace mediapipe
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_JNI_UTIL_H_

View File

@@ -0,0 +1,54 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/packet_context_jni.h"
#include "absl/strings/str_format.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
// Releases a native mediapipe packet.
JNIEXPORT void JNICALL PACKET_METHOD(nativeReleasePacket)(JNIEnv* env,
jobject thiz,
jlong packet) {
// Removes the packet from the mediapipe context.
mediapipe::android::Graph::RemovePacket(packet);
}
JNIEXPORT jlong JNICALL PACKET_METHOD(nativeGetTimestamp)(JNIEnv* env,
jobject thiz,
jlong packet) {
return mediapipe::android::Graph::GetPacketFromHandle(packet)
.Timestamp()
.Value();
}
JNIEXPORT jlong JNICALL PACKET_METHOD(nativeCopyPacket)(JNIEnv* env,
jobject thiz,
jlong packet) {
auto mediapipe_graph =
mediapipe::android::Graph::GetContextFromHandle(packet);
mediapipe::Packet mediapipe_packet =
mediapipe::android::Graph::GetPacketFromHandle(packet);
return mediapipe_graph->WrapPacketIntoContext(mediapipe_packet);
}
jobject CreateJavaPacket(JNIEnv* env, jclass packet_cls, jlong packet) {
jmethodID createMethod = env->GetStaticMethodID(
packet_cls, "create",
absl::StrFormat(
"(J)L%s;",
std::string(mediapipe::android::Graph::kJavaPacketClassName))
.c_str());
return env->CallStaticObjectMethod(packet_cls, createMethod, packet);
}
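// For reference: assuming kJavaPacketClassName is
// "com/google/mediapipe/framework/Packet", the StrFormat call above yields
// the JNI descriptor "(J)Lcom/google/mediapipe/framework/Packet;", i.e. a
// static factory that takes a long handle and returns a Packet.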

View File

@@ -0,0 +1,49 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CONTEXT_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CONTEXT_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define PACKET_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_Packet_##METHOD_NAME
// Releases a native mediapipe packet.
JNIEXPORT void JNICALL PACKET_METHOD(nativeReleasePacket)(JNIEnv* env,
jobject thiz,
jlong packet);
// Returns the timestamp of the packet.
JNIEXPORT jlong JNICALL PACKET_METHOD(nativeGetTimestamp)(JNIEnv* env,
jobject thiz,
jlong packet);
// Makes a copy of a mediapipe packet; this basically just increases the
// reference count.
JNIEXPORT jlong JNICALL PACKET_METHOD(nativeCopyPacket)(JNIEnv* env,
jobject thiz,
jlong packet);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
// Calls the java method to create an instance of java Packet.
jobject CreateJavaPacket(JNIEnv* env, jclass packet_cls, jlong packet);
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CONTEXT_JNI_H_

View File

@@ -0,0 +1,389 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.h"
#include <cstring>
#include <memory>
#include "mediapipe/framework/camera_intrinsics.h"
#include "mediapipe/framework/formats/image_format.pb.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/matrix.h"
#include "mediapipe/framework/formats/time_series_header.pb.h"
#include "mediapipe/framework/formats/video_stream_header.h"
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
#ifndef MEDIAPIPE_DISABLE_GPU
#include "mediapipe/gpu/gl_calculator_helper.h"
#endif // !defined(MEDIAPIPE_DISABLE_GPU)
namespace {
template <class T>
int64_t CreatePacketScalar(jlong context, const T& value) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
mediapipe::Packet packet = mediapipe::Adopt(new T(value));
return mediapipe_graph->WrapPacketIntoContext(packet);
}
// Creates a new internal::PacketWithContext object, and returns the native
// handle.
int64_t CreatePacketWithContext(jlong context,
const mediapipe::Packet& packet) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
return mediapipe_graph->WrapPacketIntoContext(packet);
}
} // namespace
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateReferencePacket)(
JNIEnv* env, jobject thiz, jlong context, jlong packet) {
auto mediapipe_graph = reinterpret_cast<mediapipe::android::Graph*>(context);
mediapipe::Packet mediapipe_packet =
mediapipe::android::Graph::GetPacketFromHandle(packet);
auto reference_packet = mediapipe::AdoptAsUniquePtr(
new mediapipe::SyncedPacket(mediapipe_packet));
// The initial value of the packet reference is assigned above.
return mediapipe_graph->WrapPacketIntoContext(reference_packet);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbImage)(
JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width,
jint height) {
const void* data = env->GetDirectBufferAddress(byte_buffer);
auto image_frame = absl::make_unique<::mediapipe::ImageFrame>(
mediapipe::ImageFormat::SRGB, width, height,
::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary);
int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer);
if (buffer_size != image_frame->PixelDataSize()) {
LOG(ERROR) << "The input image buffer should have 4 bytes alignment.";
LOG(ERROR) << "Buffer size: " << buffer_size
<< ", Buffer size needed: " << image_frame->PixelDataSize()
<< ", Image width: " << width;
return 0L;
}
std::memcpy(image_frame->MutablePixelData(), data,
image_frame->PixelDataSize());
mediapipe::Packet packet = mediapipe::Adopt(image_frame.release());
return CreatePacketWithContext(context, packet);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbImageFromRgba)(
JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width,
jint height) {
const uint8_t* rgba_data =
static_cast<uint8_t*>(env->GetDirectBufferAddress(byte_buffer));
auto image_frame = absl::make_unique<::mediapipe::ImageFrame>(
mediapipe::ImageFormat::SRGB, width, height,
::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary);
int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer);
if (buffer_size != width * height * 4) {
LOG(ERROR) << "Please check the input buffer size.";
LOG(ERROR) << "Buffer size: " << buffer_size
<< ", Buffer size needed: " << width * height * 4
<< ", Image width: " << width;
return 0L;
}
mediapipe::android::RgbaToRgb(rgba_data, width * 4, width, height,
image_frame->MutablePixelData(),
image_frame->WidthStep());
mediapipe::Packet packet = mediapipe::Adopt(image_frame.release());
return CreatePacketWithContext(context, packet);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateGrayscaleImage)(
JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width,
jint height) {
auto image_frame = absl::make_unique<::mediapipe::ImageFrame>(
mediapipe::ImageFormat::GRAY8, width, height,
::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary);
int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer);
if (buffer_size != width * height) {
LOG(ERROR) << "Please check the input buffer size.";
LOG(ERROR) << "Buffer size: " << buffer_size
<< ", Buffer size needed: " << width * height
<< ", Image height: " << height;
return 0L;
}
int width_step = image_frame->WidthStep();
// Copy buffer data to image frame's pixel_data_.
const char* src_row =
reinterpret_cast<const char*>(env->GetDirectBufferAddress(byte_buffer));
char* dst_row = reinterpret_cast<char*>(image_frame->MutablePixelData());
for (int i = height; i > 0; --i) {
std::memcpy(dst_row, src_row, width);
src_row += width;
dst_row += width_step;
}
mediapipe::Packet packet = mediapipe::Adopt(image_frame.release());
return CreatePacketWithContext(context, packet);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbaImageFrame)(
JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width,
jint height) {
const void* rgba_data = env->GetDirectBufferAddress(byte_buffer);
auto image_frame = absl::make_unique<::mediapipe::ImageFrame>(
mediapipe::ImageFormat::SRGBA, width, height,
::mediapipe::ImageFrame::kGlDefaultAlignmentBoundary);
int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer);
if (buffer_size != image_frame->PixelDataSize()) {
LOG(ERROR) << "Please check the input buffer size.";
LOG(ERROR) << "Buffer size: " << buffer_size
<< ", Buffer size needed: " << image_frame->PixelDataSize()
<< ", Image width: " << width;
return 0L;
}
std::memcpy(image_frame->MutablePixelData(), rgba_data,
image_frame->PixelDataSize());
mediapipe::Packet packet = mediapipe::Adopt(image_frame.release());
return CreatePacketWithContext(context, packet);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateAudioPacket)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data,
jint num_channels, jint num_samples) {
if (env->GetArrayLength(data) != num_channels * num_samples * 2) {
LOG(ERROR) << "Please check the audio data size, "
"has to be num_channels * num_samples * 2 = "
<< num_channels * num_samples * 2;
return 0L;
}
std::unique_ptr<::mediapipe::Matrix> matrix(
new ::mediapipe::Matrix(num_channels, num_samples));
// Note: audio_data_ref is really a const jbyte*, but that clashes with the
// expectation of ReleaseByteArrayElements below.
jbyte* audio_data_ref = env->GetByteArrayElements(data, nullptr);
// Prepare and normalize the audio data.
// kMultiplier is the same as the one used in av_sync_media_decoder.cc.
static const float kMultiplier = 1.f / (1 << 15);
// We try not to assume the endianness of the data.
const uint8_t* audio_sample = reinterpret_cast<uint8_t*>(audio_data_ref);
for (int sample = 0; sample < num_samples; ++sample) {
for (int channel = 0; channel < num_channels; ++channel) {
int16_t value = (audio_sample[1] & 0xff) << 8 | audio_sample[0];
(*matrix)(channel, sample) = kMultiplier * value;
audio_sample += 2;
}
}
env->ReleaseByteArrayElements(data, audio_data_ref, JNI_ABORT);
mediapipe::Packet packet = mediapipe::Adopt(matrix.release());
return CreatePacketWithContext(context, packet);
}
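// Worked example of the decode loop above: little-endian sample bytes
// {0x00, 0x40} give value = (0x40 & 0xff) << 8 | 0x00 = 16384, and
// 16384 * kMultiplier = 16384 / 32768 = 0.5f in the matrix.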
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt16)(JNIEnv* env,
jobject thiz,
jlong context,
jshort value) {
return CreatePacketScalar<int16_t>(context, value);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32)(JNIEnv* env,
jobject thiz,
jlong context,
jint value) {
return CreatePacketScalar<int>(context, value);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt64)(JNIEnv* env,
jobject thiz,
jlong context,
jlong value) {
return CreatePacketScalar<int64_t>(context, value);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat32)(
JNIEnv* env, jobject thiz, jlong context, jfloat value) {
return CreatePacketScalar<float>(context, value);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat64)(
JNIEnv* env, jobject thiz, jlong context, jdouble value) {
return CreatePacketScalar<double>(context, value);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateBool)(
JNIEnv* env, jobject thiz, jlong context, jboolean value) {
return CreatePacketScalar<bool>(context, value);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateString)(
JNIEnv* env, jobject thiz, jlong context, jstring value) {
return CreatePacketScalar<std::string>(
context, mediapipe::android::JStringToStdString(env, value));
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateVideoHeader)(
JNIEnv* env, jobject thiz, jlong context, jint width, jint height) {
mediapipe::VideoHeader header;
header.format = mediapipe::ImageFormat::SRGB;
header.width = width;
header.height = height;
return CreatePacketScalar<mediapipe::VideoHeader>(context, header);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateTimeSeriesHeader)(
JNIEnv* env, jobject thiz, jlong context, jint num_channels,
jdouble sample_rate) {
mediapipe::TimeSeriesHeader header;
header.set_num_channels(num_channels);
header.set_sample_rate(sample_rate);
return CreatePacketScalar<mediapipe::TimeSeriesHeader>(context, header);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateMatrix)(
JNIEnv* env, jobject thiz, jlong context, jint rows, jint cols,
jfloatArray data) {
if (env->GetArrayLength(data) != rows * cols) {
LOG(ERROR) << "Please check the matrix data size, "
"has to be rows * cols = "
<< rows * cols;
return 0L;
}
std::unique_ptr<::mediapipe::Matrix> matrix(
new ::mediapipe::Matrix(rows, cols));
// The Java and native sides share the same byte order (little-endian by
// default), so we can safely copy the data directly; tests cover this.
env->GetFloatArrayRegion(data, 0, rows * cols, matrix->data());
mediapipe::Packet packet = mediapipe::Adopt(matrix.release());
return CreatePacketWithContext(context, packet);
}
#ifndef MEDIAPIPE_DISABLE_GPU
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateGpuBuffer)(
JNIEnv* env, jobject thiz, jlong context, jint name, jint width,
jint height, jobject texture_release_callback) {
mediapipe::android::Graph* mediapipe_graph =
reinterpret_cast<mediapipe::android::Graph*>(context);
auto* gpu_resources = mediapipe_graph->GetGpuResources();
CHECK(gpu_resources) << "Cannot create a mediapipe::GpuBuffer packet on a "
"graph without GPU support";
mediapipe::GlTextureBuffer::DeletionCallback cc_callback;
if (texture_release_callback) {
// TODO: see if this can be cached.
// Note: we don't get this from the object because people may pass a
// subclass of PacketCreator, and the method is private.
jclass my_class =
env->FindClass("com/google/mediapipe/framework/PacketCreator");
jmethodID release_method =
env->GetMethodID(my_class, "releaseWithSyncToken",
"(JL"
"com/google/mediapipe/framework/TextureReleaseCallback"
";)V");
CHECK(release_method);
env->DeleteLocalRef(my_class);
jobject java_callback = env->NewGlobalRef(texture_release_callback);
jobject packet_creator = env->NewGlobalRef(thiz);
cc_callback = [mediapipe_graph, packet_creator, release_method,
java_callback](mediapipe::GlSyncToken release_token) {
JNIEnv* env = mediapipe::java::GetJNIEnv();
jlong raw_token = reinterpret_cast<jlong>(
new mediapipe::GlSyncToken(std::move(release_token)));
env->CallVoidMethod(packet_creator, release_method, raw_token,
java_callback);
// Note that this callback is called only once, and is not saved
// anywhere else, so we can and should delete it here.
env->DeleteGlobalRef(java_callback);
env->DeleteGlobalRef(packet_creator);
};
}
mediapipe::Packet packet = mediapipe::MakePacket<mediapipe::GpuBuffer>(
mediapipe::GlTextureBuffer::Wrap(GL_TEXTURE_2D, name, width, height,
mediapipe::GpuBufferFormat::kBGRA32,
cc_callback));
return CreatePacketWithContext(context, packet);
}
#endif // !defined(MEDIAPIPE_DISABLE_GPU)
// TODO: Add vector creators.
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat32Array)(
JNIEnv* env, jobject thiz, jlong context, jfloatArray data) {
jsize count = env->GetArrayLength(data);
jfloat* data_ref = env->GetFloatArrayElements(data, nullptr);
float* floats = new float[count];
// jfloat is a "machine-dependent native type" which represents a 32-bit
// float. C++ makes no guarantees about the size of floating point types, and
// some exotic architectures don't even have 32-bit floats (or even binary
// floats), but on all architectures we care about this is a float.
static_assert(std::is_same<float, jfloat>::value, "jfloat must be float");
std::memcpy(floats, data_ref, count * sizeof(float));
env->ReleaseFloatArrayElements(data, data_ref, JNI_ABORT);
// The reinterpret_cast is needed to make the Adopt template recognize
// that this is an array - this way Holder will call delete[].
mediapipe::Packet packet =
mediapipe::Adopt(reinterpret_cast<float(*)[]>(floats));
return CreatePacketWithContext(context, packet);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32Array)(
JNIEnv* env, jobject thiz, jlong context, jintArray data) {
jsize count = env->GetArrayLength(data);
jint* data_ref = env->GetIntArrayElements(data, nullptr);
int32_t* ints = new int32_t[count];
static_assert(std::is_same<int32_t, jint>::value, "jint must be int32_t");
std::memcpy(ints, data_ref, count * sizeof(int32_t));
env->ReleaseIntArrayElements(data, data_ref, JNI_ABORT);
// The reinterpret_cast is needed to make the Adopt template recognize
// that this is an array - this way Holder will call delete[].
mediapipe::Packet packet =
mediapipe::Adopt(reinterpret_cast<int32_t(*)[]>(ints));
return CreatePacketWithContext(context, packet);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateStringFromByteArray)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data) {
jsize count = env->GetArrayLength(data);
jbyte* data_ref = env->GetByteArrayElements(data, nullptr);
mediapipe::Packet packet = mediapipe::Adopt(
new std::string(reinterpret_cast<char*>(data_ref), count));
env->ReleaseByteArrayElements(data, data_ref, JNI_ABORT);
return CreatePacketWithContext(context, packet);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateCalculatorOptions)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data) {
jsize count = env->GetArrayLength(data);
jbyte* data_ref = env->GetByteArrayElements(data, nullptr);
auto options = absl::make_unique<mediapipe::CalculatorOptions>();
if (!options->ParseFromArray(data_ref, count)) {
LOG(ERROR) << "Parsing binary-encoded CalculatorOptions failed.";
return 0L;
}
mediapipe::Packet packet = mediapipe::Adopt(options.release());
env->ReleaseByteArrayElements(data, data_ref, JNI_ABORT);
return CreatePacketWithContext(context, packet);
}
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateCameraIntrinsics)(
JNIEnv* env, jobject thiz, jlong context, jfloat fx, jfloat fy, jfloat cx,
jfloat cy, jfloat width, jfloat height) {
mediapipe::Packet packet =
mediapipe::MakePacket<CameraIntrinsics>(fx, fy, cx, cy, width, height);
return CreatePacketWithContext(context, packet);
}

View File

@@ -0,0 +1,116 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CREATOR_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CREATOR_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define PACKET_CREATOR_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_PacketCreator_##METHOD_NAME
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateReferencePacket)(
JNIEnv* env, jobject thiz, jlong context, jlong packet);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbImage)(
JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width,
jint height);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbaImageFrame)(
JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width,
jint height);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateRgbImageFromRgba)(
JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width,
jint height);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateGrayscaleImage)(
JNIEnv* env, jobject thiz, jlong context, jobject byte_buffer, jint width,
jint height);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateAudioPacket)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data,
jint num_channels, jint num_samples);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt16)(JNIEnv* env,
jobject thiz,
jlong context,
jshort value);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32)(JNIEnv* env,
jobject thiz,
jlong context,
jint value);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt64)(JNIEnv* env,
jobject thiz,
jlong context,
jlong value);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat32)(
JNIEnv* env, jobject thiz, jlong context, jfloat value);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat64)(
JNIEnv* env, jobject thiz, jlong context, jdouble value);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateBool)(JNIEnv* env,
jobject thiz,
jlong context,
jboolean value);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateString)(
JNIEnv* env, jobject thiz, jlong context, jstring value);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateVideoHeader)(
JNIEnv* env, jobject thiz, jlong context, jint width, jint height);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateTimeSeriesHeader)(
JNIEnv* env, jobject thiz, jlong context, jint num_channels,
jdouble sample_rate);
// Creates a mediapipe::Matrix packet using the float array data.
// The data must be in column-major order.
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateMatrix)(
JNIEnv* env, jobject thiz, jlong context, jint rows, jint cols,
jfloatArray data);
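// For example, the 2x2 matrix {{1, 2}, {3, 4}} must be passed as the
// column-major float array {1, 3, 2, 4}.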
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateGpuBuffer)(
JNIEnv* env, jobject thiz, jlong context, jint name, jint width,
jint height, jobject texture_release_callback);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat32Array)(
JNIEnv* env, jobject thiz, jlong context, jfloatArray data);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32Array)(
JNIEnv* env, jobject thiz, jlong context, jintArray data);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateStringFromByteArray)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateCalculatorOptions)(
JNIEnv* env, jobject thiz, jlong context, jbyteArray data);
JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateCameraIntrinsics)(
JNIEnv* env, jobject thiz, jlong context, jfloat fx, jfloat fy, jfloat cx,
jfloat cy, jfloat width, jfloat height);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_CREATOR_JNI_H_

View File

@@ -0,0 +1,347 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/java/com/google/mediapipe/framework/jni/packet_getter_jni.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/matrix.h"
#include "mediapipe/framework/formats/time_series_header.pb.h"
#include "mediapipe/framework/formats/video_stream_header.h"
#include "mediapipe/framework/port/core_proto_inc.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
#ifndef MEDIAPIPE_DISABLE_GPU
#include "mediapipe/gpu/gl_calculator_helper.h"
#endif // !defined(MEDIAPIPE_DISABLE_GPU)
namespace {
template <typename T>
const T& GetFromNativeHandle(int64_t packet_handle) {
return mediapipe::android::Graph::GetPacketFromHandle(packet_handle).Get<T>();
}
} // namespace
JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetPacketFromReference)(
JNIEnv* env, jobject thiz, jlong packet) {
mediapipe::Packet mediapipe_packet =
mediapipe::android::Graph::GetPacketFromHandle(packet)
.Get<std::unique_ptr<mediapipe::SyncedPacket>>()
->Get();
auto mediapipe_graph =
mediapipe::android::Graph::GetContextFromHandle(packet);
return mediapipe_graph->WrapPacketIntoContext(mediapipe_packet);
}
JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetPairPackets)(
JNIEnv* env, jobject thiz, jlong packet) {
jlongArray return_handles = env->NewLongArray(2);
auto pair_packets =
GetFromNativeHandle<std::pair<mediapipe::Packet, mediapipe::Packet>>(
packet);
auto mediapipe_graph =
mediapipe::android::Graph::GetContextFromHandle(packet);
int64_t handles[2];
handles[0] = mediapipe_graph->WrapPacketIntoContext(pair_packets.first);
handles[1] = mediapipe_graph->WrapPacketIntoContext(pair_packets.second);
env->SetLongArrayRegion(return_handles, 0, 2,
reinterpret_cast<const jlong*>(handles));
return return_handles;
}
JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetVectorPackets)(
JNIEnv* env, jobject thiz, jlong packet) {
auto vector_packets =
GetFromNativeHandle<std::vector<mediapipe::Packet>>(packet);
auto mediapipe_graph =
mediapipe::android::Graph::GetContextFromHandle(packet);
jlongArray return_handles = env->NewLongArray(vector_packets.size());
std::vector<int64_t> handles(vector_packets.size());
for (int i = 0; i < vector_packets.size(); ++i) {
handles[i] = mediapipe_graph->WrapPacketIntoContext(vector_packets[i]);
}
env->SetLongArrayRegion(return_handles, 0, handles.size(),
reinterpret_cast<const jlong*>(&(handles[0])));
return return_handles;
}
JNIEXPORT jshort JNICALL PACKET_GETTER_METHOD(nativeGetInt16)(JNIEnv* env,
jobject thiz,
jlong packet) {
return GetFromNativeHandle<int16_t>(packet);
}
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetInt32)(JNIEnv* env,
jobject thiz,
jlong packet) {
return GetFromNativeHandle<int32_t>(packet);
}
JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetInt64)(JNIEnv* env,
jobject thiz,
jlong packet) {
return GetFromNativeHandle<int64_t>(packet);
}
JNIEXPORT jfloat JNICALL PACKET_GETTER_METHOD(nativeGetFloat32)(JNIEnv* env,
jobject thiz,
jlong packet) {
return GetFromNativeHandle<float>(packet);
}
JNIEXPORT jdouble JNICALL PACKET_GETTER_METHOD(nativeGetFloat64)(JNIEnv* env,
jobject thiz,
jlong packet) {
return GetFromNativeHandle<double>(packet);
}
JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetBool)(JNIEnv* env,
jobject thiz,
jlong packet) {
return GetFromNativeHandle<bool>(packet);
}
JNIEXPORT jstring JNICALL PACKET_GETTER_METHOD(nativeGetString)(JNIEnv* env,
jobject thiz,
jlong packet) {
const std::string& value = GetFromNativeHandle<std::string>(packet);
return env->NewStringUTF(value.c_str());
}
JNIEXPORT jbyteArray JNICALL
PACKET_GETTER_METHOD(nativeGetBytes)(JNIEnv* env, jobject thiz, jlong packet) {
const std::string& value = GetFromNativeHandle<std::string>(packet);
jbyteArray data = env->NewByteArray(value.length());
env->SetByteArrayRegion(data, 0, value.length(),
reinterpret_cast<const jbyte*>(value.c_str()));
return data;
}
JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetProtoBytes)(
JNIEnv* env, jobject thiz, jlong packet) {
mediapipe::Packet mediapipe_packet =
mediapipe::android::Graph::GetPacketFromHandle(packet);
const auto& proto_message = mediapipe_packet.GetProtoMessageLite();
std::string serialized;
proto_message.SerializeToString(&serialized);
jbyteArray data = env->NewByteArray(serialized.size());
env->SetByteArrayRegion(data, 0, serialized.size(),
reinterpret_cast<const jbyte*>(serialized.c_str()));
return data;
}
JNIEXPORT jshortArray JNICALL PACKET_GETTER_METHOD(nativeGetInt16Vector)(
JNIEnv* env, jobject thiz, jlong packet) {
const std::vector<int16_t>& values =
GetFromNativeHandle<std::vector<int16_t>>(packet);
jshortArray result = env->NewShortArray(values.size());
env->SetShortArrayRegion(result, 0, values.size(), &(values[0]));
return result;
}
JNIEXPORT jintArray JNICALL PACKET_GETTER_METHOD(nativeGetInt32Vector)(
JNIEnv* env, jobject thiz, jlong packet) {
const std::vector<int>& values =
GetFromNativeHandle<std::vector<int>>(packet);
jintArray result = env->NewIntArray(values.size());
env->SetIntArrayRegion(result, 0, values.size(), &(values[0]));
return result;
}
JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetInt64Vector)(
JNIEnv* env, jobject thiz, jlong packet) {
const std::vector<int64_t>& values =
GetFromNativeHandle<std::vector<int64_t>>(packet);
jlongArray result = env->NewLongArray(values.size());
// 64-bit builds treat jlong as long long and int64_t as long int; both are
// 64 bits wide, but the reinterpret_cast is still needed to avoid a compile
// error.
env->SetLongArrayRegion(result, 0, values.size(),
reinterpret_cast<const jlong*>(&(values[0])));
return result;
}
JNIEXPORT jfloatArray JNICALL PACKET_GETTER_METHOD(nativeGetFloat32Vector)(
JNIEnv* env, jobject thiz, jlong packet) {
const std::vector<float>& values =
GetFromNativeHandle<std::vector<float>>(packet);
jfloatArray result = env->NewFloatArray(values.size());
env->SetFloatArrayRegion(result, 0, values.size(), &(values[0]));
return result;
}
JNIEXPORT jdoubleArray JNICALL PACKET_GETTER_METHOD(nativeGetFloat64Vector)(
JNIEnv* env, jobject thiz, jlong packet) {
const std::vector<double>& values =
GetFromNativeHandle<std::vector<double>>(packet);
jdoubleArray result = env->NewDoubleArray(values.size());
env->SetDoubleArrayRegion(result, 0, values.size(), &(values[0]));
return result;
}
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetImageWidth)(JNIEnv* env,
jobject thiz,
jlong packet) {
const ::mediapipe::ImageFrame& image =
GetFromNativeHandle<::mediapipe::ImageFrame>(packet);
return image.Width();
}
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetImageHeight)(
JNIEnv* env, jobject thiz, jlong packet) {
const ::mediapipe::ImageFrame& image =
GetFromNativeHandle<::mediapipe::ImageFrame>(packet);
return image.Height();
}
JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetImageData)(
JNIEnv* env, jobject thiz, jlong packet, jobject byte_buffer) {
const ::mediapipe::ImageFrame& image =
GetFromNativeHandle<::mediapipe::ImageFrame>(packet);
uint8* data = static_cast<uint8*>(env->GetDirectBufferAddress(byte_buffer));
int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer);
// Assume byte buffer stores pixel data contiguously.
const int expected_buffer_size = image.Width() * image.Height() *
image.ByteDepth() * image.NumberOfChannels();
if (buffer_size != expected_buffer_size) {
LOG(ERROR) << "Expected buffer size " << expected_buffer_size
<< " got: " << buffer_size << ", width " << image.Width()
<< ", height " << image.Height() << ", channels "
<< image.NumberOfChannels();
return false;
}
image.CopyToBuffer(data, expected_buffer_size);
return true;
}
JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetRgbaFromRgb)(
JNIEnv* env, jobject thiz, jlong packet, jobject byte_buffer) {
const ::mediapipe::ImageFrame& image =
GetFromNativeHandle<::mediapipe::ImageFrame>(packet);
uint8_t* rgba_data =
static_cast<uint8_t*>(env->GetDirectBufferAddress(byte_buffer));
int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer);
if (buffer_size != image.Width() * image.Height() * 4) {
LOG(ERROR) << "Buffer size has to be width*height*4\n"
<< "Image width: " << image.Width()
<< ", Image height: " << image.Height()
<< ", Buffer size: " << buffer_size << ", Buffer size needed: "
<< image.Width() * image.Height() * 4;
return false;
}
mediapipe::android::RgbToRgba(image.PixelData(), image.WidthStep(),
image.Width(), image.Height(), rgba_data,
image.Width() * 4, 255);
return true;
}
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetVideoHeaderWidth)(
JNIEnv* env, jobject thiz, jlong packet) {
return GetFromNativeHandle<mediapipe::VideoHeader>(packet).width;
}
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetVideoHeaderHeight)(
JNIEnv* env, jobject thiz, jlong packet) {
return GetFromNativeHandle<mediapipe::VideoHeader>(packet).height;
}
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(
nativeGetTimeSeriesHeaderNumChannels)(JNIEnv* env, jobject thiz,
jlong packet) {
return GetFromNativeHandle<mediapipe::TimeSeriesHeader>(packet)
.num_channels();
}
JNIEXPORT jdouble JNICALL PACKET_GETTER_METHOD(
nativeGetTimeSeriesHeaderSampleRate)(JNIEnv* env, jobject thiz,
jlong packet) {
return GetFromNativeHandle<mediapipe::TimeSeriesHeader>(packet).sample_rate();
}
JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetAudioData)(
JNIEnv* env, jobject thiz, jlong packet) {
const ::mediapipe::Matrix& audio_mat =
GetFromNativeHandle<::mediapipe::Matrix>(packet);
int num_channels = audio_mat.rows();
int num_samples = audio_mat.cols();
int data_size = num_channels * num_samples * 2;
const int kMultiplier = 1 << 15;
jbyteArray byte_data = env->NewByteArray(data_size);
int offset = 0;
for (int sample = 0; sample < num_samples; ++sample) {
for (int channel = 0; channel < num_channels; ++channel) {
int16 value =
static_cast<int16>(audio_mat(channel, sample) * kMultiplier);
      // Java and native code use the same byte order (little-endian by
      // default), so we can safely copy the data directly; tests cover this.
env->SetByteArrayRegion(byte_data, offset, 2,
reinterpret_cast<const jbyte*>(&value));
offset += 2;
}
}
return byte_data;
}
JNIEXPORT jfloatArray JNICALL PACKET_GETTER_METHOD(nativeGetMatrixData)(
JNIEnv* env, jobject thiz, jlong packet) {
const ::mediapipe::Matrix& audio_mat =
GetFromNativeHandle<::mediapipe::Matrix>(packet);
int rows = audio_mat.rows();
int cols = audio_mat.cols();
jfloatArray float_data = env->NewFloatArray(rows * cols);
env->SetFloatArrayRegion(float_data, 0, rows * cols,
reinterpret_cast<const jfloat*>(audio_mat.data()));
return float_data;
}
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetMatrixRows)(JNIEnv* env,
jobject thiz,
jlong packet) {
return GetFromNativeHandle<::mediapipe::Matrix>(packet).rows();
}
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetMatrixCols)(JNIEnv* env,
jobject thiz,
jlong packet) {
return GetFromNativeHandle<::mediapipe::Matrix>(packet).cols();
}
#ifndef MEDIAPIPE_DISABLE_GPU
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetGpuBufferName)(
JNIEnv* env, jobject thiz, jlong packet) {
const mediapipe::GpuBuffer& gpu_buffer =
GetFromNativeHandle<mediapipe::GpuBuffer>(packet);
  // The underlying GlTextureBuffer's name() returns a GLuint. Make sure the
  // cast to jint is safe.
static_assert(sizeof(GLuint) <= sizeof(jint),
"The cast to jint may truncate GLuint");
return static_cast<jint>(gpu_buffer.GetGlTextureBufferSharedPtr()->name());
}
JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetGpuBuffer)(JNIEnv* env,
jobject thiz,
jlong packet) {
const mediapipe::GpuBuffer& gpu_buffer =
GetFromNativeHandle<mediapipe::GpuBuffer>(packet);
const mediapipe::GlTextureBufferSharedPtr& ptr =
gpu_buffer.GetGlTextureBufferSharedPtr();
ptr->WaitUntilComplete();
return reinterpret_cast<intptr_t>(
new mediapipe::GlTextureBufferSharedPtr(ptr));
}
#endif // !defined(MEDIAPIPE_DISABLE_GPU)
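
The two getters above with the trickiest contracts are nativeGetImageData, which only succeeds when handed a direct ByteBuffer whose capacity is exactly width * height * byteDepth * channels, and nativeGetAudioData, which returns int16 PCM samples serialized in native byte order. A minimal Java-side sketch of both contracts, assuming the public PacketGetter wrappers mirror the native method names and an RGB frame with one byte per channel:

import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.PacketGetter;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;

final class PacketGetterDemo {
  private PacketGetterDemo() {}

  // Copies an RGB ImageFrame packet into a Java-side buffer. Assumes 3
  // channels with a byte depth of 1; the native side rejects any buffer whose
  // capacity differs from width * height * byteDepth * channels. The buffer
  // must be direct, since the JNI code reads it via GetDirectBufferAddress.
  static ByteBuffer copyRgbFrame(Packet packet) {
    int width = PacketGetter.getImageWidth(packet);
    int height = PacketGetter.getImageHeight(packet);
    ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3);
    if (!PacketGetter.getImageData(packet, buffer)) {
      throw new IllegalStateException("ImageFrame buffer size mismatch");
    }
    return buffer;
  }

  // Decodes the channel-interleaved int16 PCM bytes from an audio packet.
  // The native side writes each sample in the platform's native byte order,
  // so the bytes must be read back with the same order.
  static short[] decodeAudio(Packet packet) {
    byte[] bytes = PacketGetter.getAudioData(packet);
    ShortBuffer samples =
        ByteBuffer.wrap(bytes).order(ByteOrder.nativeOrder()).asShortBuffer();
    short[] out = new short[samples.remaining()];
    samples.get(out);
    return out;
  }
}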

View File

@@ -0,0 +1,157 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_GETTER_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_GETTER_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define PACKET_GETTER_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_PacketGetter_##METHOD_NAME
// Get a native mediapipe packet.
JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetPacketFromReference)(
JNIEnv* env, jobject thiz, jlong packet);
JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetPairPackets)(
JNIEnv* env, jobject thiz, jlong packet);
JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetVectorPackets)(
JNIEnv* env, jobject thiz, jlong packet);
JNIEXPORT jshort JNICALL PACKET_GETTER_METHOD(nativeGetInt16)(JNIEnv* env,
jobject thiz,
jlong packet);
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetInt32)(JNIEnv* env,
jobject thiz,
jlong packet);
JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetInt64)(JNIEnv* env,
jobject thiz,
jlong packet);
JNIEXPORT jfloat JNICALL PACKET_GETTER_METHOD(nativeGetFloat32)(JNIEnv* env,
jobject thiz,
jlong packet);
JNIEXPORT jdouble JNICALL PACKET_GETTER_METHOD(nativeGetFloat64)(JNIEnv* env,
jobject thiz,
jlong packet);
JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetBool)(JNIEnv* env,
jobject thiz,
jlong packet);
JNIEXPORT jstring JNICALL PACKET_GETTER_METHOD(nativeGetString)(JNIEnv* env,
jobject thiz,
jlong packet);
JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetBytes)(JNIEnv* env,
jobject thiz,
jlong packet);
JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetProtoBytes)(
JNIEnv* env, jobject thiz, jlong packet);
JNIEXPORT jshortArray JNICALL PACKET_GETTER_METHOD(nativeGetInt16Vector)(
JNIEnv* env, jobject thiz, jlong packet);
JNIEXPORT jintArray JNICALL PACKET_GETTER_METHOD(nativeGetInt32Vector)(
JNIEnv* env, jobject thiz, jlong packet);
JNIEXPORT jlongArray JNICALL PACKET_GETTER_METHOD(nativeGetInt64Vector)(
JNIEnv* env, jobject thiz, jlong packet);
JNIEXPORT jfloatArray JNICALL PACKET_GETTER_METHOD(nativeGetFloat32Vector)(
JNIEnv* env, jobject thiz, jlong packet);
JNIEXPORT jdoubleArray JNICALL PACKET_GETTER_METHOD(nativeGetFloat64Vector)(
JNIEnv* env, jobject thiz, jlong packet);
// ImageFrame jni functions.
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetImageWidth)(JNIEnv* env,
jobject thiz,
jlong packet);
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetImageHeight)(JNIEnv* env,
jobject thiz,
jlong packet);
// Before calling this, the byte_buffer needs to have the correct allocated
// size.
JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetImageData)(
JNIEnv* env, jobject thiz, jlong packet, jobject byte_buffer);
// Before calling this, the byte_buffer needs to have the correct allocated
// size.
JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetRgbaFromRgb)(
JNIEnv* env, jobject thiz, jlong packet, jobject byte_buffer);
// Returns the width in VideoHeader packet.
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetVideoHeaderWidth)(
JNIEnv* env, jobject thiz, jlong packet);
// Returns the height in VideoHeader packet.
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetVideoHeaderHeight)(
JNIEnv* env, jobject thiz, jlong packet);
// Gets the byte array from the MediaPipe audio data.
JNIEXPORT jbyteArray JNICALL PACKET_GETTER_METHOD(nativeGetAudioData)(
JNIEnv* env, jobject thiz, jlong packet);
// Gets number of channels in time series header packet.
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(
nativeGetTimeSeriesHeaderNumChannels)(JNIEnv* env, jobject thiz,
jlong packet);
// Gets sampling rate in time series header packet.
JNIEXPORT jdouble JNICALL PACKET_GETTER_METHOD(
nativeGetTimeSeriesHeaderSampleRate)(JNIEnv* env, jobject thiz,
jlong packet);
// Returns the raw float array data for the MediaPipe Matrix.
// Note: mediapipe::Matrix is a column-major matrix.
JNIEXPORT jfloatArray JNICALL PACKET_GETTER_METHOD(nativeGetMatrixData)(
JNIEnv* env, jobject thiz, jlong packet);
// Returns the number of rows of the matrix.
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetMatrixRows)(JNIEnv* env,
jobject thiz,
jlong packet);
// Returns the number of cols of the matrix.
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetMatrixCols)(JNIEnv* env,
jobject thiz,
jlong packet);
// Returns the GL texture name of the mediapipe::GpuBuffer.
JNIEXPORT jint JNICALL PACKET_GETTER_METHOD(nativeGetGpuBufferName)(
JNIEnv* env, jobject thiz, jlong packet);
// Returns a mediapipe::GlTextureBufferSharedPtr*.
// This will survive independently of the packet.
JNIEXPORT jlong JNICALL PACKET_GETTER_METHOD(nativeGetGpuBuffer)(JNIEnv* env,
jobject thiz,
jlong packet);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_PACKET_GETTER_JNI_H_
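
Since the header notes that the matrix is column-major, the flat array returned by the matrix getter stores element (r, c) at index c * rows + r. A sketch of rebuilding a row-major 2D array from it, again assuming the public PacketGetter wrappers mirror the native names:

import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.PacketGetter;

final class MatrixUtil {
  private MatrixUtil() {}

  // Rebuilds a [rows][cols] array from the flat, column-major float array.
  static float[][] toRowMajor(Packet packet) {
    int rows = PacketGetter.getMatrixRows(packet);
    int cols = PacketGetter.getMatrixCols(packet);
    float[] data = PacketGetter.getMatrixData(packet);
    float[][] out = new float[rows][cols];
    for (int c = 0; c < cols; c++) {
      for (int r = 0; r < rows; r++) {
        out[r][c] = data[c * rows + r];
      }
    }
    return out;
  }
}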

View File

@@ -0,0 +1,131 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "absl/synchronization/mutex.h"
#ifdef __ANDROID__
#include <android/native_window_jni.h>
#endif // __ANDROID__
#include "mediapipe/framework/port/ret_check.h"
#include "mediapipe/framework/port/status.h"
#include "mediapipe/gpu/egl_surface_holder.h"
#include "mediapipe/gpu/gpu_shared_data_internal.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h"
#include "mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.h"
// TODO: CHECK in JNI does not work. Raise exception instead.
namespace {
mediapipe::EglSurfaceHolder* GetSurfaceHolder(jlong packet) {
return mediapipe::android::Graph::GetPacketFromHandle(packet)
.Get<std::unique_ptr<mediapipe::EglSurfaceHolder>>()
.get();
}
mediapipe::GlContext* GetGlContext(jlong context) {
auto mediapipe_graph = reinterpret_cast<mediapipe::android::Graph*>(context);
mediapipe::GpuResources* gpu_resources = mediapipe_graph->GetGpuResources();
return gpu_resources ? gpu_resources->gl_context().get() : nullptr;
}
} // namespace
JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetFlipY)(
JNIEnv* env, jobject thiz, jlong packet, jboolean flip) {
mediapipe::EglSurfaceHolder* surface_holder = GetSurfaceHolder(packet);
surface_holder->flip_y = flip;
}
JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetSurface)(
JNIEnv* env, jobject thiz, jlong context, jlong packet, jobject surface) {
#ifdef __ANDROID__
mediapipe::GlContext* gl_context = GetGlContext(context);
CHECK(gl_context) << "GPU shared data not created";
mediapipe::EglSurfaceHolder* surface_holder = GetSurfaceHolder(packet);
  // ANativeWindow_fromSurface is a JNI call, so it must not be made on the
  // GL thread.
ANativeWindow* window = nullptr;
if (surface) {
window = ANativeWindow_fromSurface(env, surface);
}
auto status = gl_context->Run(
[gl_context, surface_holder, surface, window]() -> ::mediapipe::Status {
absl::MutexLock lock(&surface_holder->mutex);
// Must destroy old surface first in case we are assigning the same
// surface.
// TODO: keep a ref to Java object and short-circuit if same?
if (surface_holder->owned) {
// NOTE: according to the eglDestroySurface documentation, the surface
// is destroyed immediately "if it is not current on any thread". This
// surface is only made current by the SurfaceSinkCalculator while it
// holds the surface_holder->mutex, so at this point we know it is not
// current on any thread, and we can rely on it being destroyed
// immediately.
RET_CHECK(eglDestroySurface(gl_context->egl_display(),
surface_holder->surface))
<< "eglDestroySurface failed:" << eglGetError();
}
EGLSurface egl_surface = EGL_NO_SURFACE;
if (surface) {
EGLint surface_attr[] = {EGL_NONE};
egl_surface = eglCreateWindowSurface(gl_context->egl_display(),
gl_context->egl_config(), window,
surface_attr);
RET_CHECK(egl_surface != EGL_NO_SURFACE)
<< "eglCreateWindowSurface() returned error:" << eglGetError();
}
surface_holder->surface = egl_surface;
surface_holder->owned = egl_surface != EGL_NO_SURFACE;
return ::mediapipe::OkStatus();
});
MEDIAPIPE_CHECK_OK(status);
if (window) {
VLOG(2) << "releasing window";
ANativeWindow_release(window);
}
#else
LOG(FATAL) << "setSurface is only supported on Android";
#endif // __ANDROID__
}
JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetEglSurface)(
JNIEnv* env, jobject thiz, jlong context, jlong packet, jlong surface) {
mediapipe::GlContext* gl_context = GetGlContext(context);
CHECK(gl_context) << "GPU shared data not created";
auto egl_surface = reinterpret_cast<EGLSurface>(surface);
mediapipe::EglSurfaceHolder* surface_holder = GetSurfaceHolder(packet);
EGLSurface old_surface = EGL_NO_SURFACE;
{
absl::MutexLock lock(&surface_holder->mutex);
if (surface_holder->owned) {
old_surface = surface_holder->surface;
}
surface_holder->surface = egl_surface;
surface_holder->owned = false;
}
if (old_surface != EGL_NO_SURFACE) {
MEDIAPIPE_CHECK_OK(
gl_context->Run([gl_context, old_surface]() -> ::mediapipe::Status {
RET_CHECK(eglDestroySurface(gl_context->egl_display(), old_surface))
<< "eglDestroySurface failed:" << eglGetError();
return ::mediapipe::OkStatus();
}));
}
}
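
The swap in nativeSetEglSurface follows a pattern worth noting: the handle exchange happens under the holder's mutex, while the potentially blocking eglDestroySurface call runs afterwards on the GL thread, outside the lock. An illustrative Java rendering of the same pattern (the names here are hypothetical, not MediaPipe API):

import java.util.concurrent.ExecutorService;

final class SurfaceSlot {
  private final Object lock = new Object();
  private long surface;  // stand-in for an EGLSurface handle
  private boolean owned;

  void swap(long newSurface, ExecutorService glThread) {
    long old = 0;
    synchronized (lock) {
      if (owned) {
        old = surface;  // remember it; destroy later, outside the lock
      }
      surface = newSurface;
      owned = false;  // the caller retains ownership of newSurface
    }
    if (old != 0) {
      final long doomed = old;
      // Destruction runs on the thread that owns the GL context.
      glThread.execute(() -> destroyEglSurface(doomed));
    }
  }

  // Hypothetical stand-in for the native eglDestroySurface call.
  private static void destroyEglSurface(long handle) {}
}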

View File

@@ -0,0 +1,42 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_SURFACE_OUTPUT_JNI_H_
#define JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_SURFACE_OUTPUT_JNI_H_
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
#define MEDIAPIPE_SURFACE_OUTPUT_METHOD(METHOD_NAME) \
Java_com_google_mediapipe_framework_SurfaceOutput_##METHOD_NAME
JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetFlipY)(
JNIEnv* env, jobject thiz, jlong packet, jboolean flip);
#ifdef __ANDROID__
JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetSurface)(
JNIEnv* env, jobject thiz, jlong context, jlong packet, jobject surface);
#endif // __ANDROID__
JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetEglSurface)(
JNIEnv* env, jobject thiz, jlong context, jlong packet, jlong surface);
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
#endif // JAVA_COM_GOOGLE_MEDIAPIPE_FRAMEWORK_JNI_SURFACE_OUTPUT_JNI_H_

View File

@@ -0,0 +1,26 @@
# Additional flags to pass to Proguard when processing a binary that uses
# MediaPipe.
# Keep public members of our public interfaces. This also prevents the
# obfuscation of the corresponding methods in classes implementing them,
# such as implementations of PacketCallback#process.
-keep public interface com.google.mediapipe.framework.* {
public *;
}
# This method is invoked by native code.
-keep public class com.google.mediapipe.framework.Packet {
public static *** create(***);
public long getNativeHandle();
public void release();
}
# This method is invoked by native code.
-keep public class com.google.mediapipe.framework.PacketCreator {
*** releaseWithSyncToken(...);
}
# This method is invoked by native code.
-keep public class com.google.mediapipe.framework.MediaPipeException {
<init>(int, byte[]);
}

View File

@@ -0,0 +1,32 @@
# Copyright 2019 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
licenses(["notice"]) # Apache 2.0
# OpenGL utilities.
# TODO: move Compat.java in here, remove dep
# TODO: add tests
android_library(
name = "glutil",
srcs = glob(["**/*.java"]),
visibility = ["//visibility:public"],
deps = [
"//mediapipe/java/com/google/mediapipe/framework:android_framework",
"@com_google_code_findbugs//jar",
"@com_google_common_flogger//jar",
"@com_google_common_flogger_system_backend//jar",
"@com_google_guava_android//jar",
],
)

View File

@@ -0,0 +1,81 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.glutil;
import java.nio.FloatBuffer;
/** Collection of common simple shaders and related resources. */
public class CommonShaders {
/**
* Shader for rendering a simple geometry.
*
* <p>Keeps the vertices that are passed in, and applies a transformation to the texture (pass an
* identity matrix if not needed). The transformation uniform is there to support {@link
* android.graphics.SurfaceTexture#getTransformMatrix}.
*/
public static final String VERTEX_SHADER =
"uniform mat4 texture_transform;\n"
+ "attribute vec4 position;\n"
+ "attribute mediump vec4 texture_coordinate;\n"
+ "varying mediump vec2 sample_coordinate;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = position;\n"
+ " sample_coordinate = (texture_transform * texture_coordinate).xy;\n"
+ "}";
/** Simple fragment shader that renders a 2D texture. */
public static final String FRAGMENT_SHADER =
"varying mediump vec2 sample_coordinate;\n"
+ "uniform sampler2D video_frame;\n"
+ "\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(video_frame, sample_coordinate);\n"
+ "}";
/**
* Simple fragment shader that renders a texture bound to the {@link
* android.opengl.GLES11Ext#GL_TEXTURE_EXTERNAL_OES} target. See {@link
* android.graphics.SurfaceTexture}.
*/
public static final String FRAGMENT_SHADER_EXTERNAL =
"#extension GL_OES_EGL_image_external : require\n"
+ "varying mediump vec2 sample_coordinate;\n"
+ "uniform samplerExternalOES video_frame;\n"
+ "\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(video_frame, sample_coordinate);\n"
+ "}";
/**
* Vertices for a quad that fills the drawing area.
*
* <p>Can be used directly with {@link android.opengl.GLES10#glDrawArrays}.
*/
public static final FloatBuffer SQUARE_VERTICES =
ShaderUtil.floatBuffer(
-1.0f, -1.0f, // bottom left
1.0f, -1.0f, // bottom right
-1.0f, 1.0f, // top left
1.0f, 1.0f // top right
);
/**
* Vertices for a quad that fills the drawing area, but rotated 90 degrees.
*/
public static final FloatBuffer ROTATED_SQUARE_VERTICES =
ShaderUtil.floatBuffer(
-1.0f, 1.0f, // top left
-1.0f, -1.0f, // bottom left
1.0f, 1.0f, // top right
1.0f, -1.0f // bottom right
);
}

View File

@@ -0,0 +1,408 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.glutil;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.os.Build;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.google.mediapipe.framework.Compat;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
/**
* Helper class for creating and managing an {@link EGLContext}.
*
* <p>Note: Since we want to support API level 16, we cannot rely on {@link android.opengl.EGL14}.
*/
public class EglManager {
private static final String TAG = "EglManager";
// These are missing from EGL10.
public static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public static final int EGL_OPENGL_ES2_BIT = 0x4;
public static final int EGL_OPENGL_ES3_BIT_KHR = 0x00000040;
public static final int EGL_DRAW = 12377;
public static final int EGL_READ = 12378;
public static final int EGL14_API_LEVEL = android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
private EGL10 egl;
private EGLDisplay eglDisplay = EGL10.EGL_NO_DISPLAY;
private EGLConfig eglConfig = null;
private EGLContext eglContext = EGL10.EGL_NO_CONTEXT;
private int[] singleIntArray; // reuse this instead of recreating it each time
private int glVersion;
private long nativeEglContext = 0;
private android.opengl.EGLContext egl14Context = null;
/**
* Creates an EglManager wrapping a new {@link EGLContext}.
*
* @param parentContext another EGL context with which to share data (e.g. textures); can be an
* {@link EGLContext} or an {@link android.opengl.EGLContext}; can be null.
*/
public EglManager(@Nullable Object parentContext) {
this(parentContext, null);
}
/**
* Creates an EglManager wrapping a new {@link EGLContext}.
*
* @param parentContext another EGL context with which to share data (e.g. textures); can be an
* {@link EGLContext} or an {@link android.opengl.EGLContext}; can be null.
* @param additionalConfigAttributes a list of attributes for eglChooseConfig to be added to the
* default ones.
*/
public EglManager(@Nullable Object parentContext, @Nullable int[] additionalConfigAttributes) {
singleIntArray = new int[1];
egl = (EGL10) EGLContext.getEGL();
eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed");
}
int[] version = new int[2];
if (!egl.eglInitialize(eglDisplay, version)) {
throw new RuntimeException("eglInitialize failed");
}
EGLContext realParentContext;
if (parentContext == null) {
realParentContext = EGL10.EGL_NO_CONTEXT;
} else if (parentContext instanceof EGLContext) {
realParentContext = (EGLContext) parentContext;
} else if (Build.VERSION.SDK_INT >= EGL14_API_LEVEL
&& parentContext instanceof android.opengl.EGLContext) {
if (parentContext == EGL14.EGL_NO_CONTEXT) {
realParentContext = EGL10.EGL_NO_CONTEXT;
} else {
realParentContext = egl10ContextFromEgl14Context((android.opengl.EGLContext) parentContext);
}
} else {
throw new RuntimeException("invalid parent context: " + parentContext);
}
// Try to create an OpenGL ES 3 context first, then fall back on ES 2.
try {
createContext(realParentContext, 3, additionalConfigAttributes);
glVersion = 3;
} catch (RuntimeException e) {
Log.w(TAG, "could not create GLES 3 context: " + e);
createContext(realParentContext, 2, additionalConfigAttributes);
glVersion = 2;
}
}
/** Returns the managed {@link EGLContext} */
public EGLContext getContext() {
return eglContext;
}
/** Returns the native handle to the context. */
public long getNativeContext() {
if (nativeEglContext == 0) {
grabContextVariants();
}
return nativeEglContext;
}
public android.opengl.EGLContext getEgl14Context() {
if (Build.VERSION.SDK_INT < EGL14_API_LEVEL) {
throw new RuntimeException("cannot use EGL14 on API level < 17");
}
if (egl14Context == null) {
grabContextVariants();
}
return egl14Context;
}
public int getGlMajorVersion() {
return glVersion;
}
/** Makes this the current EGL context on the current thread. */
public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
if (!egl.eglMakeCurrent(eglDisplay, drawSurface, readSurface, eglContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/** Makes no EGL context current on the current thread. */
public void makeNothingCurrent() {
if (!egl.eglMakeCurrent(
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Creates an {@link EGLSurface} for an Android Surface.
*
* @param surface can be a {@link Surface}, {@link SurfaceTexture}, {@link SurfaceHolder} or
* {@link SurfaceView}.
*/
public EGLSurface createWindowSurface(Object surface) {
if (!(surface instanceof Surface
|| surface instanceof SurfaceTexture
|| surface instanceof SurfaceHolder
|| surface instanceof SurfaceView)) {
throw new RuntimeException("invalid surface: " + surface);
}
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {EGL10.EGL_NONE};
EGLSurface eglSurface =
egl.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs);
checkEglError("eglCreateWindowSurface");
if (eglSurface == null) {
throw new RuntimeException("surface was null");
}
return eglSurface;
}
/**
* Creates an {@link EGLSurface} for offscreen rendering, not bound to any Android surface.
*
* <p>An EGLSurface is always required to make an EGLContext current, and it is bound to the
* OpenGL framebuffer by default. However, the framebuffer can then be bound to other objects in
* OpenGL, such as a texture.
* <p>If you want to use an EGLContext but do not really care about the EGLSurface, you can use
* a 1x1 surface created with this method.
*/
public EGLSurface createOffscreenSurface(int width, int height) {
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
EGLSurface eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
checkEglError("eglCreatePbufferSurface");
if (eglSurface == null) {
throw new RuntimeException("surface was null");
}
return eglSurface;
}
/** Releases the resources held by this manager. */
public void release() {
if (eglDisplay != EGL10.EGL_NO_DISPLAY) {
// Android is unusual in that it uses a reference-counted EGLDisplay. So for
// every eglInitialize() we need an eglTerminate().
egl.eglMakeCurrent(
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
egl.eglDestroyContext(eglDisplay, eglContext);
egl.eglTerminate(eglDisplay);
}
eglDisplay = EGL10.EGL_NO_DISPLAY;
eglContext = EGL10.EGL_NO_CONTEXT;
eglConfig = null;
}
/** Releases an {@link EGLSurface}. */
public void releaseSurface(EGLSurface eglSurface) {
egl.eglDestroySurface(eglDisplay, eglSurface);
}
private void createContext(
EGLContext parentContext, int glVersion, @Nullable int[] additionalConfigAttributes) {
eglConfig = getConfig(glVersion, additionalConfigAttributes);
if (eglConfig == null) {
throw new RuntimeException("Unable to find a suitable EGLConfig");
}
    // Create a context for the requested OpenGL ES version.
int[] contextAttrs = {EGL_CONTEXT_CLIENT_VERSION, glVersion, EGL10.EGL_NONE};
eglContext = egl.eglCreateContext(eglDisplay, eglConfig, parentContext, contextAttrs);
if (eglContext == null) {
int error = egl.eglGetError();
throw new RuntimeException(
"Could not create GL context: EGL error: 0x"
+ Integer.toHexString(error)
+ (error == EGL10.EGL_BAD_CONTEXT
? ": parent context uses a different version of OpenGL"
: ""));
}
}
/**
* Gets the native context handle for our context. The EGL10 API does not provide a way to do this
* directly, but we can make our context current and grab the current context handle from native
* code. Also gets the context as an {@link android.opengl.EGLContext}. The underlying native
* object is always the same, but the Android API has two different wrappers for it which are
* completely equivalent internally but completely separate at the Java level.
*/
private void grabContextVariants() {
EGLContext previousContext = egl.eglGetCurrentContext();
EGLDisplay previousDisplay = egl.eglGetCurrentDisplay();
EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL_DRAW);
EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL_READ);
EGLSurface tempEglSurface = null;
if (previousContext != eglContext) {
tempEglSurface = createOffscreenSurface(1, 1);
makeCurrent(tempEglSurface, tempEglSurface);
}
nativeEglContext = Compat.getCurrentNativeEGLContext();
if (Build.VERSION.SDK_INT >= EGL14_API_LEVEL) {
egl14Context = android.opengl.EGL14.eglGetCurrentContext();
}
if (previousContext != eglContext) {
egl.eglMakeCurrent(
previousDisplay, previousDrawSurface, previousReadSurface, previousContext);
releaseSurface(tempEglSurface);
}
}
private EGLContext egl10ContextFromEgl14Context(android.opengl.EGLContext context) {
android.opengl.EGLContext previousContext = EGL14.eglGetCurrentContext();
android.opengl.EGLDisplay previousDisplay = EGL14.eglGetCurrentDisplay();
android.opengl.EGLSurface previousDrawSurface = EGL14.eglGetCurrentSurface(EGL_DRAW);
android.opengl.EGLSurface previousReadSurface = EGL14.eglGetCurrentSurface(EGL_READ);
android.opengl.EGLDisplay defaultDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
android.opengl.EGLSurface tempEglSurface = null;
if (!previousContext.equals(context)) {
int[] surfaceAttribs = {EGL14.EGL_WIDTH, 1, EGL14.EGL_HEIGHT, 1, EGL14.EGL_NONE};
android.opengl.EGLConfig tempConfig = getThrowawayConfig(defaultDisplay);
tempEglSurface =
EGL14.eglCreatePbufferSurface(previousDisplay, tempConfig, surfaceAttribs, 0);
EGL14.eglMakeCurrent(defaultDisplay, tempEglSurface, tempEglSurface, context);
}
EGLContext egl10Context = egl.eglGetCurrentContext();
if (!previousContext.equals(context)) {
EGL14.eglMakeCurrent(
previousDisplay, previousDrawSurface, previousReadSurface, previousContext);
EGL14.eglDestroySurface(defaultDisplay, tempEglSurface);
}
return egl10Context;
}
private android.opengl.EGLConfig getThrowawayConfig(android.opengl.EGLDisplay display) {
int[] attribList = {
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT | EGL10.EGL_WINDOW_BIT, EGL10.EGL_NONE
};
android.opengl.EGLConfig[] configs = new android.opengl.EGLConfig[1];
int[] numConfigs = singleIntArray;
if (!EGL14.eglChooseConfig(display, attribList, 0, configs, 0, 1, numConfigs, 0)) {
throw new IllegalArgumentException("eglChooseConfig failed");
}
if (numConfigs[0] <= 0) {
throw new IllegalArgumentException("No configs match requested attributes");
}
return configs[0];
}
/**
* Merges two EGL attribute lists. The second list may be null. Values in the second list override
* those with the same key in the first list.
*/
private int[] mergeAttribLists(int[] list1, @Nullable int[] list2) {
if (list2 == null) {
return list1;
}
HashMap<Integer, Integer> attribMap = new HashMap<>();
for (int[] list : new int[][] {list1, list2}) {
for (int i = 0; i < list.length / 2; i++) {
int key = list[2 * i];
int value = list[2 * i + 1];
if (key == EGL10.EGL_NONE) {
break;
}
attribMap.put(key, value);
}
}
int[] merged = new int[attribMap.size() * 2 + 1];
int i = 0;
for (Map.Entry<Integer, Integer> e : attribMap.entrySet()) {
merged[i++] = e.getKey();
merged[i++] = e.getValue();
}
merged[i] = EGL10.EGL_NONE;
return merged;
}
private EGLConfig getConfig(int glVersion, @Nullable int[] additionalConfigAttributes) {
int[] baseAttribList = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_DEPTH_SIZE, 16,
EGL10.EGL_RENDERABLE_TYPE, glVersion == 3 ? EGL_OPENGL_ES3_BIT_KHR : EGL_OPENGL_ES2_BIT,
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT | EGL10.EGL_WINDOW_BIT,
EGL10.EGL_NONE
};
int[] attribList = mergeAttribLists(baseAttribList, additionalConfigAttributes);
// First count the matching configs. Note that eglChooseConfig will return configs that
// match *or exceed* the requirements, and will put the ones that exceed first!
int[] numConfigs = singleIntArray;
if (!egl.eglChooseConfig(eglDisplay, attribList, null, 0, numConfigs)) {
throw new IllegalArgumentException("eglChooseConfig failed");
}
if (numConfigs[0] <= 0) {
throw new IllegalArgumentException("No configs match requested attributes");
}
EGLConfig[] configs = new EGLConfig[numConfigs[0]];
if (!egl.eglChooseConfig(eglDisplay, attribList, configs, configs.length, numConfigs)) {
throw new IllegalArgumentException("eglChooseConfig#2 failed");
}
// Try to find a config that matches our bit sizes exactly.
EGLConfig bestConfig = null;
for (EGLConfig config : configs) {
int r = findConfigAttrib(config, EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(config, EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(config, EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(config, EGL10.EGL_ALPHA_SIZE, 0);
if ((r == 8) && (g == 8) && (b == 8) && (a == 8)) {
bestConfig = config;
break;
}
}
if (bestConfig == null) {
bestConfig = configs[0];
}
return bestConfig;
}
private void checkEglError(String msg) {
int error;
if ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
private int findConfigAttrib(EGLConfig config, int attribute, int defaultValue) {
if (egl.eglGetConfigAttrib(eglDisplay, config, attribute, singleIntArray)) {
return singleIntArray[0];
}
return defaultValue;
}
}
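
For standalone use outside GlThread, the lifecycle is: construct, create a surface (a 1x1 pbuffer suffices when rendering targets a framebuffer-attached texture), make current, do GL work, and release everything. A minimal sketch under those assumptions, using only methods shown above:

import javax.microedition.khronos.egl.EGLSurface;

final class EglManagerDemo {
  static void demo() {
    EglManager eglManager = new EglManager(/*parentContext=*/ null);
    // EGL needs some surface to make a context current; a 1x1 pbuffer works
    // when the actual rendering goes to an FBO-attached texture.
    EGLSurface surface = eglManager.createOffscreenSurface(1, 1);
    eglManager.makeCurrent(surface, surface);
    // ... GLES20 calls run against this context on the current thread ...
    eglManager.makeNothingCurrent();
    eglManager.releaseSurface(surface);
    eglManager.release();
  }
}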

View File

@@ -0,0 +1,143 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.glutil;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import java.util.HashMap;
import java.util.Map;
/**
* Textures from {@link SurfaceTexture} are only supposed to be bound to target {@link
* GLES11Ext#GL_TEXTURE_EXTERNAL_OES}, which is accessed using samplerExternalOES in the shader.
* This means they cannot be used with a regular shader that expects a sampler2D. This class renders
* the external texture to the current framebuffer. By binding the framebuffer to a texture, this
* can be used to convert the input into a normal 2D texture.
*/
public class ExternalTextureRenderer {
private static final FloatBuffer TEXTURE_VERTICES =
ShaderUtil.floatBuffer(
0.0f, 0.0f, // bottom left
1.0f, 0.0f, // bottom right
0.0f, 1.0f, // top left
1.0f, 1.0f // top right
);
private static final FloatBuffer FLIPPED_TEXTURE_VERTICES =
ShaderUtil.floatBuffer(
0.0f, 1.0f, // top left
1.0f, 1.0f, // top right
0.0f, 0.0f, // bottom left
1.0f, 0.0f // bottom right
);
private static final String TAG = "ExternalTextureRend"; // Max length of a tag is 23.
private static final int ATTRIB_POSITION = 1;
private static final int ATTRIB_TEXTURE_COORDINATE = 2;
private int program = 0;
private int frameUniform;
private int textureTransformUniform;
private float[] textureTransformMatrix = new float[16];
private boolean flipY;
/** Call this to setup the shader program before rendering. */
public void setup() {
Map<String, Integer> attributeLocations = new HashMap<>();
attributeLocations.put("position", ATTRIB_POSITION);
attributeLocations.put("texture_coordinate", ATTRIB_TEXTURE_COORDINATE);
program =
ShaderUtil.createProgram(
CommonShaders.VERTEX_SHADER,
CommonShaders.FRAGMENT_SHADER_EXTERNAL,
attributeLocations);
frameUniform = GLES20.glGetUniformLocation(program, "video_frame");
textureTransformUniform = GLES20.glGetUniformLocation(program, "texture_transform");
ShaderUtil.checkGlError("glGetUniformLocation");
}
/**
* Flips rendering output vertically, useful for conversion between coordinate systems with
   * top-left vs. bottom-left origins. Effective in subsequent {@link #render(SurfaceTexture)}
* calls.
*/
public void setFlipY(boolean flip) {
flipY = flip;
}
/**
* Renders the surfaceTexture to the framebuffer with optional vertical flip.
*
* <p>Before calling this, {@link #setup} must have been called.
*
* <p>NOTE: Calls {@link SurfaceTexture#updateTexImage()} on passed surface texture.
*/
public void render(SurfaceTexture surfaceTexture) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
ShaderUtil.checkGlError("glActiveTexture");
surfaceTexture.updateTexImage(); // This implicitly binds the texture.
surfaceTexture.getTransformMatrix(textureTransformMatrix);
GLES20.glTexParameteri(
GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(
GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(
GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(
GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
ShaderUtil.checkGlError("glTexParameteri");
GLES20.glUseProgram(program);
ShaderUtil.checkGlError("glUseProgram");
GLES20.glUniform1i(frameUniform, 0);
ShaderUtil.checkGlError("glUniform1i");
GLES20.glUniformMatrix4fv(textureTransformUniform, 1, false, textureTransformMatrix, 0);
ShaderUtil.checkGlError("glUniformMatrix4fv");
GLES20.glEnableVertexAttribArray(ATTRIB_POSITION);
GLES20.glVertexAttribPointer(
ATTRIB_POSITION, 2, GLES20.GL_FLOAT, false, 0, CommonShaders.SQUARE_VERTICES);
GLES20.glEnableVertexAttribArray(ATTRIB_TEXTURE_COORDINATE);
GLES20.glVertexAttribPointer(
ATTRIB_TEXTURE_COORDINATE,
2,
GLES20.GL_FLOAT,
false,
0,
flipY ? FLIPPED_TEXTURE_VERTICES : TEXTURE_VERTICES);
ShaderUtil.checkGlError("program setup");
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
ShaderUtil.checkGlError("glDrawArrays");
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
ShaderUtil.checkGlError("glBindTexture");
// TODO: add sync and go back to glFlush()
GLES20.glFinish();
}
/**
* Call this to delete the shader program.
*
* <p>This is only necessary if one wants to release the program while keeping the context around.
*/
public void release() {
GLES20.glDeleteProgram(program);
}
}
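
Putting the pieces together, converting an external texture into a regular 2D texture means drawing into a framebuffer-attached texture. A sketch of that flow, assumed to run on a GlThread's own handler (so its context is current) with a SurfaceTexture already attached to that context:

import android.graphics.SurfaceTexture;
import android.opengl.GLES20;

final class OesToTextureDemo {
  static int convert(GlThread glThread, SurfaceTexture surfaceTexture,
      int width, int height) {
    ExternalTextureRenderer renderer = new ExternalTextureRenderer();
    renderer.setup();  // compiles and links the samplerExternalOES program
    int destTexture = ShaderUtil.createRgbaTexture(width, height);
    glThread.bindFramebuffer(destTexture, width, height);
    renderer.render(surfaceTexture);  // also calls updateTexImage()
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    renderer.release();
    return destTexture;  // now usable as an ordinary sampler2D texture
  }
}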

View File

@@ -0,0 +1,215 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.glutil;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import javax.annotation.Nullable;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLSurface;
/**
* A thread that manages an OpenGL context.
*
* <p>A given context can only be used by a single thread at a time. Furthermore, on at least some
* Android devices, changing the current context is reported to take a long time (on the order of
* several ms). Therefore it is often convenient to have a dedicated thread for rendering to a
* context.
*/
public class GlThread extends Thread {
private static final String TAG = "GlThread";
private static final String THREAD_NAME = "mediapipe.glutil.GlThread";
private boolean ready;
private final Object startLock = new Object();
protected volatile EglManager eglManager;
protected EGLSurface eglSurface = null;
protected Handler handler = null; // must be created on the thread itself
protected Looper looper = null; // must be created on the thread itself
protected int framebuffer = 0;
/**
* Creates a GlThread.
*
* @param parentContext another EGL context with which to share data (e.g. textures); can be an
* {@link EGLContext} or an {@link android.opengl.EGLContext}; can be null.
*/
public GlThread(@Nullable Object parentContext) {
this(parentContext, null);
}
/**
* Creates a GlThread.
*
* @param parentContext another EGL context with which to share data (e.g. textures); can be an
* {@link EGLContext} or an {@link android.opengl.EGLContext}; can be null.
* @param additionalConfigAttributes a list of attributes for eglChooseConfig to be added to the
* default ones.
*/
public GlThread(@Nullable Object parentContext, @Nullable int[] additionalConfigAttributes) {
eglManager = new EglManager(parentContext, additionalConfigAttributes);
setName(THREAD_NAME);
}
/**
* Returns the Handler associated with this thread.
*/
public Handler getHandler() {
return handler;
}
/** Returns the Looper associated with this thread. */
public Looper getLooper() {
return looper;
}
/** Returns the EglManager managing this thread's context. */
public EglManager getEglManager() {
return eglManager;
}
/**
* Returns the EGLContext associated with this thread. Do not use it on another thread.
*/
public EGLContext getEGLContext() {
return eglManager.getContext();
}
/**
* Returns the framebuffer object used by this thread.
*/
public int getFramebuffer() {
return framebuffer;
}
/**
* Binds a texture to the color attachment of the framebuffer.
*/
public void bindFramebuffer(int texture, int width, int height) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebuffer);
GLES20.glFramebufferTexture2D(
GLES20.GL_FRAMEBUFFER,
GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D,
texture,
0);
int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
throw new RuntimeException("Framebuffer not complete, status=" + status);
}
GLES20.glViewport(0, 0, width, height);
ShaderUtil.checkGlError("glViewport");
}
@Override
public void run() {
Looper.prepare();
handler = createHandler();
looper = Looper.myLooper();
Log.d(TAG, String.format("Starting GL thread %s", getName()));
prepareGl();
synchronized (startLock) {
ready = true;
startLock.notify(); // signal waitUntilReady()
}
Looper.loop();
looper = null;
releaseGl();
eglManager.release();
Log.d(TAG, String.format("Stopping GL thread %s", getName()));
synchronized (startLock) {
ready = false;
}
}
/** Terminates the thread, after processing all pending messages. */
public boolean quitSafely() {
if (looper == null) {
return false;
}
looper.quitSafely();
return true;
}
/**
   * Waits until the thread has finished setting up the handler and invoking {@link #prepareGl}.
*/
public void waitUntilReady() throws InterruptedException {
// We wait in a loop to deal with spurious wakeups. However, we do not
// catch the InterruptedException, because we have no way of knowing what
    // the application expects. On one hand, the caller expects the thread to
// be ready when this method returns, which means we would have to keep
// looping. But on the other hand, if they interrupt the thread they may
// not want it to continue execution. We have no choice but to propagate
// the exception and let the caller make the decision.
synchronized (startLock) {
while (!ready) {
startLock.wait();
}
}
}
/** Sets up the OpenGL context. Can be overridden to set up additional resources. */
public void prepareGl() {
eglSurface = createEglSurface();
eglManager.makeCurrent(eglSurface, eglSurface);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
GLES20.glDisable(GLES20.GL_CULL_FACE);
int[] values = new int[1];
GLES20.glGenFramebuffers(1, values, 0);
framebuffer = values[0];
}
/** Releases the resources created in prepareGl. */
public void releaseGl() {
if (framebuffer != 0) {
int[] values = new int[1];
values[0] = framebuffer;
GLES20.glDeleteFramebuffers(1, values, 0);
framebuffer = 0;
}
eglManager.makeNothingCurrent();
if (eglSurface != null) {
eglManager.releaseSurface(eglSurface);
eglSurface = null;
}
}
/**
* Factory method that creates the handler used by the thread. Can be overridden to use a custom
* {@link Handler}.
*/
protected Handler createHandler() {
return new Handler();
}
/** Factory method that creates the surface used by the thread. */
protected EGLSurface createEglSurface() {
return eglManager.createOffscreenSurface(1, 1);
}
}
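
A sketch of the intended lifecycle, using only methods shown above: start the thread, block until prepareGl has run, post GL work to its handler, then shut it down.

final class GlThreadDemo {
  static void demo() throws InterruptedException {
    GlThread glThread = new GlThread(/*parentContext=*/ null);
    glThread.start();
    glThread.waitUntilReady();  // blocks until prepareGl() has finished
    glThread.getHandler().post(() -> {
      // Executes with the thread's EGL context current.
      android.opengl.GLES20.glClearColor(0f, 0f, 0f, 1f);
    });
    glThread.quitSafely();  // drains pending messages, then the thread exits
  }
}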

View File

@@ -0,0 +1,178 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.glutil;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import com.google.common.flogger.FluentLogger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Utility class for managing GLSL shaders.
*/
public class ShaderUtil {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
/**
* Loads a shader from source.
* @param shaderType a valid GL shader type, e.g. {@link GLES20#GL_VERTEX_SHADER} or
* {@link GLES20#GL_FRAGMENT_SHADER}.
* @param source the shader's source in text form.
* @return a handle to the created shader, or 0 in case of error.
*/
public static int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
logger.atSevere().log("Could not compile shader %d: %s", shaderType,
GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
/**
* Creates a shader program.
*
* @param vertexSource source of the vertex shader.
* @param fragmentSource source of the fragment shader.
* @param attributeLocations a map of desired locations for attributes. Can be null.
* @return a handle to the created program, or 0 in case of error.
*/
public static int createProgram(
String vertexSource,
String fragmentSource,
@Nullable Map<String, Integer> attributeLocations) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (fragmentShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program == 0) {
logger.atSevere().log("Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
if (attributeLocations != null) {
for (Map.Entry<String, Integer> entry : attributeLocations.entrySet()) {
GLES20.glBindAttribLocation(program, entry.getValue(), entry.getKey());
}
}
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
logger.atSevere().log("Could not link program: %s", GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
return program;
}
/**
* Creates a texture. Binds it to texture unit 0 to perform setup.
* @return the name of the new texture.
*/
public static int createRgbaTexture(int width, int height) {
final int[] textureName = new int[] {0};
GLES20.glGenTextures(1, textureName, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureName[0]);
GLES20.glTexImage2D(
GLES20.GL_TEXTURE_2D,
0,
GLES20.GL_RGBA,
width, height,
0,
GLES20.GL_RGBA,
GLES20.GL_UNSIGNED_BYTE,
null);
ShaderUtil.checkGlError("glTexImage2D");
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
ShaderUtil.checkGlError("texture setup");
return textureName[0];
}
/**
* Creates a texture from a Bitmap. Binds it to texture unit 0 to perform setup.
*
* @return the name of the new texture.
*/
public static int createRgbaTexture(Bitmap bitmap) {
final int[] textureName = new int[] {0};
GLES20.glGenTextures(1, textureName, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureName[0]);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
ShaderUtil.checkGlError("texImage2D");
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
ShaderUtil.checkGlError("texture setup");
return textureName[0];
}
/**
* Creates a {@link FloatBuffer} with the given arguments as contents.
* The buffer is created in native format for efficient use with OpenGL.
*/
public static FloatBuffer floatBuffer(float... values) {
ByteBuffer byteBuffer =
ByteBuffer.allocateDirect(
values.length * 4 /* sizeof(float) */);
// use the device hardware's native byte order
byteBuffer.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
FloatBuffer floatBuffer = byteBuffer.asFloatBuffer();
// add the coordinates to the FloatBuffer
floatBuffer.put(values);
// set the buffer to read the first coordinate
floatBuffer.position(0);
return floatBuffer;
}
/**
* Calls {@link GLES20#glGetError} and raises an exception if there was an error.
*/
public static void checkGlError(String msg) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
throw new RuntimeException(msg + ": GL error: 0x" + Integer.toHexString(error));
}
}
}
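
A minimal sketch of the createProgram flow with explicitly bound attribute locations, the same pattern the renderer classes use in setup(); it assumes a GL context is already current on the calling thread:

import android.opengl.GLES20;
import java.util.HashMap;
import java.util.Map;

final class ProgramDemo {
  static int build() {
    Map<String, Integer> attributes = new HashMap<>();
    attributes.put("position", 1);
    attributes.put("texture_coordinate", 2);
    int program = ShaderUtil.createProgram(
        CommonShaders.VERTEX_SHADER, CommonShaders.FRAGMENT_SHADER, attributes);
    if (program == 0) {
      // createProgram already logged the compile or link error.
      throw new RuntimeException("shader program failed to build");
    }
    GLES20.glUseProgram(program);
    ShaderUtil.checkGlError("glUseProgram");
    return program;
  }
}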

View File

@@ -0,0 +1,105 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.mediapipe.glutil;
import android.opengl.GLES20;
import android.opengl.Matrix;
import java.nio.FloatBuffer;
import java.util.HashMap;
import java.util.Map;
/** Simple renderer for a normal 2D texture. */
public class TextureRenderer {
private static final FloatBuffer TEXTURE_VERTICES =
ShaderUtil.floatBuffer(
0.0f, 0.0f, // bottom left
1.0f, 0.0f, // bottom right
0.0f, 1.0f, // top left
1.0f, 1.0f // top right
);
private static final String TAG = "TextureRenderer";
private static final int ATTRIB_POSITION = 1;
private static final int ATTRIB_TEXTURE_COORDINATE = 2;
private int program = 0;
private int frameUniform;
private int textureTransformUniform;
private float[] textureTransformMatrix = new float[16];
/** Call this to setup the shader program before rendering. */
public void setup() {
Map<String, Integer> attributeLocations = new HashMap<>();
attributeLocations.put("position", ATTRIB_POSITION);
attributeLocations.put("texture_coordinate", ATTRIB_TEXTURE_COORDINATE);
program =
ShaderUtil.createProgram(
CommonShaders.VERTEX_SHADER, CommonShaders.FRAGMENT_SHADER, attributeLocations);
frameUniform = GLES20.glGetUniformLocation(program, "video_frame");
textureTransformUniform = GLES20.glGetUniformLocation(program, "texture_transform");
ShaderUtil.checkGlError("glGetUniformLocation");
Matrix.setIdentityM(textureTransformMatrix, 0 /* offset */);
}
/**
* Renders a texture to the framebuffer.
*
* <p>Before calling this, {@link #setup} must have been called.
*/
public void render(int textureName) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
ShaderUtil.checkGlError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureName);
ShaderUtil.checkGlError("glBindTexture");
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
ShaderUtil.checkGlError("glTexParameteri");
GLES20.glUseProgram(program);
ShaderUtil.checkGlError("glUseProgram");
GLES20.glUniform1i(frameUniform, 0);
ShaderUtil.checkGlError("glUniform1i");
GLES20.glUniformMatrix4fv(textureTransformUniform, 1, false, textureTransformMatrix, 0);
ShaderUtil.checkGlError("glUniformMatrix4fv");
GLES20.glEnableVertexAttribArray(ATTRIB_POSITION);
GLES20.glVertexAttribPointer(
ATTRIB_POSITION, 2, GLES20.GL_FLOAT, false, 0, CommonShaders.SQUARE_VERTICES);
GLES20.glEnableVertexAttribArray(ATTRIB_TEXTURE_COORDINATE);
GLES20.glVertexAttribPointer(
ATTRIB_TEXTURE_COORDINATE, 2, GLES20.GL_FLOAT, false, 0, TEXTURE_VERTICES);
ShaderUtil.checkGlError("program setup");
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
ShaderUtil.checkGlError("glDrawArrays");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
ShaderUtil.checkGlError("glBindTexture");
GLES20.glFlush();
}
/**
* Call this to delete the shader program.
*
* <p>This is only necessary if one wants to release the program while keeping the context around.
*/
public void release() {
GLES20.glDeleteProgram(program);
}
}

View File

@@ -0,0 +1,668 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mediapipe/gpu/copy_calculator.proto
package mediapipe;
public final class CopyCalculator {
private CopyCalculator() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
registry.add(mediapipe.CopyCalculator.CopyCalculatorOptions.ext);
}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
public interface CopyCalculatorOptionsOrBuilder extends
// @@protoc_insertion_point(interface_extends:mediapipe.CopyCalculatorOptions)
com.google.protobuf.MessageOrBuilder {
/**
* <code>optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE];</code>
*/
boolean hasRotation();
/**
* <code>optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE];</code>
*/
mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation getRotation();
}
/**
* Protobuf type {@code mediapipe.CopyCalculatorOptions}
*/
public static final class CopyCalculatorOptions extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:mediapipe.CopyCalculatorOptions)
CopyCalculatorOptionsOrBuilder {
// Use CopyCalculatorOptions.newBuilder() to construct.
private CopyCalculatorOptions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CopyCalculatorOptions() {
rotation_ = 0;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CopyCalculatorOptions(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
int rawValue = input.readEnum();
mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation value = mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
rotation_ = rawValue;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
mediapipe.CopyCalculator.CopyCalculatorOptions.class, mediapipe.CopyCalculator.CopyCalculatorOptions.Builder.class);
}
/**
* Protobuf enum {@code mediapipe.CopyCalculatorOptions.Rotation}
*/
public enum Rotation
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>NONE = 0;</code>
*/
NONE(0),
/**
* <pre>
* rotate 90 degrees counterclockwise
* </pre>
*
* <code>CCW = 1;</code>
*/
CCW(1),
/**
* <pre>
* hack to rectify convfloat
* </pre>
*
* <code>CCW_FLIP = 2;</code>
*/
CCW_FLIP(2),
;
/**
* <code>NONE = 0;</code>
*/
public static final int NONE_VALUE = 0;
/**
* <pre>
* rotate 90 degrees counterclockwise
* </pre>
*
* <code>CCW = 1;</code>
*/
public static final int CCW_VALUE = 1;
/**
* <pre>
* hack to rectify convfloat
* </pre>
*
* <code>CCW_FLIP = 2;</code>
*/
public static final int CCW_FLIP_VALUE = 2;
public final int getNumber() {
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Rotation valueOf(int value) {
return forNumber(value);
}
public static Rotation forNumber(int value) {
switch (value) {
case 0: return NONE;
case 1: return CCW;
case 2: return CCW_FLIP;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Rotation>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
Rotation> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Rotation>() {
public Rotation findValueByNumber(int number) {
return Rotation.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return mediapipe.CopyCalculator.CopyCalculatorOptions.getDescriptor().getEnumTypes().get(0);
}
private static final Rotation[] VALUES = values();
public static Rotation valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private Rotation(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:mediapipe.CopyCalculatorOptions.Rotation)
}
private int bitField0_;
public static final int ROTATION_FIELD_NUMBER = 1;
private int rotation_;
/**
* <code>optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE];</code>
*/
public boolean hasRotation() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE];</code>
*/
public mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation getRotation() {
mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation result = mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.valueOf(rotation_);
return result == null ? mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.NONE : result;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, rotation_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, rotation_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof mediapipe.CopyCalculator.CopyCalculatorOptions)) {
return super.equals(obj);
}
mediapipe.CopyCalculator.CopyCalculatorOptions other = (mediapipe.CopyCalculator.CopyCalculatorOptions) obj;
boolean result = true;
result = result && (hasRotation() == other.hasRotation());
if (hasRotation()) {
result = result && rotation_ == other.rotation_;
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRotation()) {
hash = (37 * hash) + ROTATION_FIELD_NUMBER;
hash = (53 * hash) + rotation_;
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(mediapipe.CopyCalculator.CopyCalculatorOptions prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code mediapipe.CopyCalculatorOptions}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:mediapipe.CopyCalculatorOptions)
mediapipe.CopyCalculator.CopyCalculatorOptionsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
mediapipe.CopyCalculator.CopyCalculatorOptions.class, mediapipe.CopyCalculator.CopyCalculatorOptions.Builder.class);
}
// Construct using mediapipe.CopyCalculator.CopyCalculatorOptions.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
rotation_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return mediapipe.CopyCalculator.internal_static_mediapipe_CopyCalculatorOptions_descriptor;
}
public mediapipe.CopyCalculator.CopyCalculatorOptions getDefaultInstanceForType() {
return mediapipe.CopyCalculator.CopyCalculatorOptions.getDefaultInstance();
}
public mediapipe.CopyCalculator.CopyCalculatorOptions build() {
mediapipe.CopyCalculator.CopyCalculatorOptions result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public mediapipe.CopyCalculator.CopyCalculatorOptions buildPartial() {
mediapipe.CopyCalculator.CopyCalculatorOptions result = new mediapipe.CopyCalculator.CopyCalculatorOptions(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.rotation_ = rotation_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof mediapipe.CopyCalculator.CopyCalculatorOptions) {
return mergeFrom((mediapipe.CopyCalculator.CopyCalculatorOptions)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(mediapipe.CopyCalculator.CopyCalculatorOptions other) {
if (other == mediapipe.CopyCalculator.CopyCalculatorOptions.getDefaultInstance()) return this;
if (other.hasRotation()) {
setRotation(other.getRotation());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
mediapipe.CopyCalculator.CopyCalculatorOptions parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (mediapipe.CopyCalculator.CopyCalculatorOptions) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int rotation_ = 0;
/**
* <code>optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE];</code>
*/
public boolean hasRotation() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE];</code>
*/
public mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation getRotation() {
mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation result = mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.valueOf(rotation_);
return result == null ? mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.NONE : result;
}
/**
* <code>optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE];</code>
*/
public Builder setRotation(mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
rotation_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>optional .mediapipe.CopyCalculatorOptions.Rotation rotation = 1 [default = NONE];</code>
*/
public Builder clearRotation() {
bitField0_ = (bitField0_ & ~0x00000001);
rotation_ = 0;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:mediapipe.CopyCalculatorOptions)
}
// @@protoc_insertion_point(class_scope:mediapipe.CopyCalculatorOptions)
private static final mediapipe.CopyCalculator.CopyCalculatorOptions DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new mediapipe.CopyCalculator.CopyCalculatorOptions();
}
public static mediapipe.CopyCalculator.CopyCalculatorOptions getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final com.google.protobuf.Parser<CopyCalculatorOptions>
PARSER = new com.google.protobuf.AbstractParser<CopyCalculatorOptions>() {
public CopyCalculatorOptions parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CopyCalculatorOptions(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<CopyCalculatorOptions> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CopyCalculatorOptions> getParserForType() {
return PARSER;
}
public mediapipe.CopyCalculator.CopyCalculatorOptions getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
public static final int EXT_FIELD_NUMBER = 161773876;
/**
* <code>extend .mediapipe.CalculatorOptions { ... }</code>
*/
public static final
com.google.protobuf.GeneratedMessage.GeneratedExtension<
com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions,
mediapipe.CopyCalculator.CopyCalculatorOptions> ext = com.google.protobuf.GeneratedMessage
.newMessageScopedGeneratedExtension(
mediapipe.CopyCalculator.CopyCalculatorOptions.getDefaultInstance(),
0,
mediapipe.CopyCalculator.CopyCalculatorOptions.class,
mediapipe.CopyCalculator.CopyCalculatorOptions.getDefaultInstance());
}
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_mediapipe_CopyCalculatorOptions_descriptor;
private static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_mediapipe_CopyCalculatorOptions_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n#mediapipe/gpu/copy_calculator.proto\022\tm" +
"ediapipe\032$mediapipe/framework/calculator" +
".proto\"\327\001\n\025CopyCalculatorOptions\022A\n\010rota" +
"tion\030\001 \001(\0162).mediapipe.CopyCalculatorOpt" +
"ions.Rotation:\004NONE\"+\n\010Rotation\022\010\n\004NONE\020" +
"\000\022\007\n\003CCW\020\001\022\014\n\010CCW_FLIP\020\0022N\n\003ext\022\034.mediap" +
"ipe.CalculatorOptions\030\264\362\221M \001(\0132 .mediapi" +
"pe.CopyCalculatorOptions"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.mediapipe.proto.CalculatorProto.getDescriptor(),
}, assigner);
internal_static_mediapipe_CopyCalculatorOptions_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_mediapipe_CopyCalculatorOptions_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_mediapipe_CopyCalculatorOptions_descriptor,
new java.lang.String[] { "Rotation", });
com.google.mediapipe.proto.CalculatorProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
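The generated class above follows the standard protobuf-java pattern: a message-scoped Builder, a family of parseFrom overloads, and the message-scoped extension ext for nesting the options inside mediapipe.CalculatorOptions. A minimal round-trip sketch using only the accessors defined above (the surrounding statement context is illustrative; toByteArray is inherited from the protobuf message base class):

// Sketch: build, serialize, and re-parse CopyCalculatorOptions.
mediapipe.CopyCalculator.CopyCalculatorOptions options =
    mediapipe.CopyCalculator.CopyCalculatorOptions.newBuilder()
        .setRotation(mediapipe.CopyCalculator.CopyCalculatorOptions.Rotation.CCW)
        .build();
byte[] wire = options.toByteArray();  // wire-format bytes
mediapipe.CopyCalculator.CopyCalculatorOptions parsed =
    mediapipe.CopyCalculator.CopyCalculatorOptions.parseFrom(wire);
// parsed.hasRotation() is true and parsed.getRotation() == Rotation.CCW.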

View File

@@ -0,0 +1,619 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mediapipe/gpu/gl_context_options.proto
package mediapipe;
public final class GlContextOptionsOuterClass {
private GlContextOptionsOuterClass() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
registry.add(mediapipe.GlContextOptionsOuterClass.GlContextOptions.ext);
}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
public interface GlContextOptionsOrBuilder extends
// @@protoc_insertion_point(interface_extends:mediapipe.GlContextOptions)
com.google.protobuf.MessageOrBuilder {
/**
* <code>optional string gl_context_name = 1;</code>
*/
boolean hasGlContextName();
/**
* <code>optional string gl_context_name = 1;</code>
*/
java.lang.String getGlContextName();
/**
* <code>optional string gl_context_name = 1;</code>
*/
com.google.protobuf.ByteString
getGlContextNameBytes();
}
/**
* Protobuf type {@code mediapipe.GlContextOptions}
*/
public static final class GlContextOptions extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:mediapipe.GlContextOptions)
GlContextOptionsOrBuilder {
// Use GlContextOptions.newBuilder() to construct.
private GlContextOptions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GlContextOptions() {
glContextName_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private GlContextOptions(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
glContextName_ = bs;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
mediapipe.GlContextOptionsOuterClass.GlContextOptions.class, mediapipe.GlContextOptionsOuterClass.GlContextOptions.Builder.class);
}
private int bitField0_;
public static final int GL_CONTEXT_NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object glContextName_;
/**
* <code>optional string gl_context_name = 1;</code>
*/
public boolean hasGlContextName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional string gl_context_name = 1;</code>
*/
public java.lang.String getGlContextName() {
java.lang.Object ref = glContextName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
glContextName_ = s;
}
return s;
}
}
/**
* <code>optional string gl_context_name = 1;</code>
*/
public com.google.protobuf.ByteString
getGlContextNameBytes() {
java.lang.Object ref = glContextName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
glContextName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, glContextName_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, glContextName_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof mediapipe.GlContextOptionsOuterClass.GlContextOptions)) {
return super.equals(obj);
}
mediapipe.GlContextOptionsOuterClass.GlContextOptions other = (mediapipe.GlContextOptionsOuterClass.GlContextOptions) obj;
boolean result = true;
result = result && (hasGlContextName() == other.hasGlContextName());
if (hasGlContextName()) {
result = result && getGlContextName()
.equals(other.getGlContextName());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasGlContextName()) {
hash = (37 * hash) + GL_CONTEXT_NAME_FIELD_NUMBER;
hash = (53 * hash) + getGlContextName().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(mediapipe.GlContextOptionsOuterClass.GlContextOptions prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code mediapipe.GlContextOptions}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:mediapipe.GlContextOptions)
mediapipe.GlContextOptionsOuterClass.GlContextOptionsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
mediapipe.GlContextOptionsOuterClass.GlContextOptions.class, mediapipe.GlContextOptionsOuterClass.GlContextOptions.Builder.class);
}
// Construct using mediapipe.GlContextOptionsOuterClass.GlContextOptions.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
glContextName_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return mediapipe.GlContextOptionsOuterClass.internal_static_mediapipe_GlContextOptions_descriptor;
}
public mediapipe.GlContextOptionsOuterClass.GlContextOptions getDefaultInstanceForType() {
return mediapipe.GlContextOptionsOuterClass.GlContextOptions.getDefaultInstance();
}
public mediapipe.GlContextOptionsOuterClass.GlContextOptions build() {
mediapipe.GlContextOptionsOuterClass.GlContextOptions result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public mediapipe.GlContextOptionsOuterClass.GlContextOptions buildPartial() {
mediapipe.GlContextOptionsOuterClass.GlContextOptions result = new mediapipe.GlContextOptionsOuterClass.GlContextOptions(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.glContextName_ = glContextName_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof mediapipe.GlContextOptionsOuterClass.GlContextOptions) {
return mergeFrom((mediapipe.GlContextOptionsOuterClass.GlContextOptions)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(mediapipe.GlContextOptionsOuterClass.GlContextOptions other) {
if (other == mediapipe.GlContextOptionsOuterClass.GlContextOptions.getDefaultInstance()) return this;
if (other.hasGlContextName()) {
bitField0_ |= 0x00000001;
glContextName_ = other.glContextName_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
mediapipe.GlContextOptionsOuterClass.GlContextOptions parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (mediapipe.GlContextOptionsOuterClass.GlContextOptions) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object glContextName_ = "";
/**
* <code>optional string gl_context_name = 1;</code>
*/
public boolean hasGlContextName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional string gl_context_name = 1;</code>
*/
public java.lang.String getGlContextName() {
java.lang.Object ref = glContextName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
glContextName_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string gl_context_name = 1;</code>
*/
public com.google.protobuf.ByteString
getGlContextNameBytes() {
java.lang.Object ref = glContextName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
glContextName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string gl_context_name = 1;</code>
*/
public Builder setGlContextName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
glContextName_ = value;
onChanged();
return this;
}
/**
* <code>optional string gl_context_name = 1;</code>
*/
public Builder clearGlContextName() {
bitField0_ = (bitField0_ & ~0x00000001);
glContextName_ = getDefaultInstance().getGlContextName();
onChanged();
return this;
}
/**
* <code>optional string gl_context_name = 1;</code>
*/
public Builder setGlContextNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
glContextName_ = value;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:mediapipe.GlContextOptions)
}
// @@protoc_insertion_point(class_scope:mediapipe.GlContextOptions)
private static final mediapipe.GlContextOptionsOuterClass.GlContextOptions DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new mediapipe.GlContextOptionsOuterClass.GlContextOptions();
}
public static mediapipe.GlContextOptionsOuterClass.GlContextOptions getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final com.google.protobuf.Parser<GlContextOptions>
PARSER = new com.google.protobuf.AbstractParser<GlContextOptions>() {
public GlContextOptions parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GlContextOptions(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<GlContextOptions> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GlContextOptions> getParserForType() {
return PARSER;
}
public mediapipe.GlContextOptionsOuterClass.GlContextOptions getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
public static final int EXT_FIELD_NUMBER = 222332034;
/**
* <code>extend .mediapipe.CalculatorOptions { ... }</code>
*/
public static final
com.google.protobuf.GeneratedMessage.GeneratedExtension<
com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions,
mediapipe.GlContextOptionsOuterClass.GlContextOptions> ext = com.google.protobuf.GeneratedMessage
.newMessageScopedGeneratedExtension(
mediapipe.GlContextOptionsOuterClass.GlContextOptions.getDefaultInstance(),
0,
mediapipe.GlContextOptionsOuterClass.GlContextOptions.class,
mediapipe.GlContextOptionsOuterClass.GlContextOptions.getDefaultInstance());
}
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_mediapipe_GlContextOptions_descriptor;
private static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_mediapipe_GlContextOptions_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n&mediapipe/gpu/gl_context_options.proto" +
"\022\tmediapipe\032$mediapipe/framework/calcula" +
"tor.proto\"v\n\020GlContextOptions\022\027\n\017gl_cont" +
"ext_name\030\001 \001(\t2I\n\003ext\022\034.mediapipe.Calcul" +
"atorOptions\030\202\211\202j \001(\0132\033.mediapipe.GlConte" +
"xtOptions"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.mediapipe.proto.CalculatorProto.getDescriptor(),
}, assigner);
internal_static_mediapipe_GlContextOptions_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_mediapipe_GlContextOptions_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_mediapipe_GlContextOptions_descriptor,
new java.lang.String[] { "GlContextName", });
com.google.mediapipe.proto.CalculatorProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
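GlContextOptions carries a single optional string, gl_context_name, with both String and ByteString accessors generated above. A short usage sketch; the context-name value is an arbitrary illustration, not taken from this commit:

// Sketch: the generated string accessors for gl_context_name.
mediapipe.GlContextOptionsOuterClass.GlContextOptions opts =
    mediapipe.GlContextOptionsOuterClass.GlContextOptions.newBuilder()
        .setGlContextName("shared_context")  // illustrative value
        .build();
java.lang.String name = opts.getGlContextName();                       // "shared_context"
com.google.protobuf.ByteString bytes = opts.getGlContextNameBytes();  // UTF-8 view of the same field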

View File

@@ -0,0 +1,894 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mediapipe/framework/test_calculators.proto
package mediapipe;
public final class TestCalculators {
private TestCalculators() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
registry.add(mediapipe.TestCalculators.RandomMatrixCalculatorOptions.ext);
}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
public interface RandomMatrixCalculatorOptionsOrBuilder extends
// @@protoc_insertion_point(interface_extends:mediapipe.RandomMatrixCalculatorOptions)
com.google.protobuf.MessageOrBuilder {
/**
* <code>optional int32 rows = 1;</code>
*/
boolean hasRows();
/**
* <code>optional int32 rows = 1;</code>
*/
int getRows();
/**
* <code>optional int32 cols = 2;</code>
*/
boolean hasCols();
/**
* <code>optional int32 cols = 2;</code>
*/
int getCols();
/**
* <code>optional int64 start_timestamp = 3;</code>
*/
boolean hasStartTimestamp();
/**
* <code>optional int64 start_timestamp = 3;</code>
*/
long getStartTimestamp();
/**
* <code>optional int64 limit_timestamp = 4;</code>
*/
boolean hasLimitTimestamp();
/**
* <code>optional int64 limit_timestamp = 4;</code>
*/
long getLimitTimestamp();
/**
* <code>optional int64 timestamp_step = 5;</code>
*/
boolean hasTimestampStep();
/**
* <code>optional int64 timestamp_step = 5;</code>
*/
long getTimestampStep();
}
/**
* Protobuf type {@code mediapipe.RandomMatrixCalculatorOptions}
*/
public static final class RandomMatrixCalculatorOptions extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:mediapipe.RandomMatrixCalculatorOptions)
RandomMatrixCalculatorOptionsOrBuilder {
// Use RandomMatrixCalculatorOptions.newBuilder() to construct.
private RandomMatrixCalculatorOptions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RandomMatrixCalculatorOptions() {
rows_ = 0;
cols_ = 0;
startTimestamp_ = 0L;
limitTimestamp_ = 0L;
timestampStep_ = 0L;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private RandomMatrixCalculatorOptions(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
rows_ = input.readInt32();
break;
}
case 16: {
bitField0_ |= 0x00000002;
cols_ = input.readInt32();
break;
}
case 24: {
bitField0_ |= 0x00000004;
startTimestamp_ = input.readInt64();
break;
}
case 32: {
bitField0_ |= 0x00000008;
limitTimestamp_ = input.readInt64();
break;
}
case 40: {
bitField0_ |= 0x00000010;
timestampStep_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
mediapipe.TestCalculators.RandomMatrixCalculatorOptions.class, mediapipe.TestCalculators.RandomMatrixCalculatorOptions.Builder.class);
}
private int bitField0_;
public static final int ROWS_FIELD_NUMBER = 1;
private int rows_;
/**
* <code>optional int32 rows = 1;</code>
*/
public boolean hasRows() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int32 rows = 1;</code>
*/
public int getRows() {
return rows_;
}
public static final int COLS_FIELD_NUMBER = 2;
private int cols_;
/**
* <code>optional int32 cols = 2;</code>
*/
public boolean hasCols() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int32 cols = 2;</code>
*/
public int getCols() {
return cols_;
}
public static final int START_TIMESTAMP_FIELD_NUMBER = 3;
private long startTimestamp_;
/**
* <code>optional int64 start_timestamp = 3;</code>
*/
public boolean hasStartTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int64 start_timestamp = 3;</code>
*/
public long getStartTimestamp() {
return startTimestamp_;
}
public static final int LIMIT_TIMESTAMP_FIELD_NUMBER = 4;
private long limitTimestamp_;
/**
* <code>optional int64 limit_timestamp = 4;</code>
*/
public boolean hasLimitTimestamp() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional int64 limit_timestamp = 4;</code>
*/
public long getLimitTimestamp() {
return limitTimestamp_;
}
public static final int TIMESTAMP_STEP_FIELD_NUMBER = 5;
private long timestampStep_;
/**
* <code>optional int64 timestamp_step = 5;</code>
*/
public boolean hasTimestampStep() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional int64 timestamp_step = 5;</code>
*/
public long getTimestampStep() {
return timestampStep_;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt32(1, rows_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt32(2, cols_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeInt64(3, startTimestamp_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeInt64(4, limitTimestamp_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeInt64(5, timestampStep_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(1, rows_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(2, cols_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(3, startTimestamp_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(4, limitTimestamp_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(5, timestampStep_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof mediapipe.TestCalculators.RandomMatrixCalculatorOptions)) {
return super.equals(obj);
}
mediapipe.TestCalculators.RandomMatrixCalculatorOptions other = (mediapipe.TestCalculators.RandomMatrixCalculatorOptions) obj;
boolean result = true;
result = result && (hasRows() == other.hasRows());
if (hasRows()) {
result = result && (getRows()
== other.getRows());
}
result = result && (hasCols() == other.hasCols());
if (hasCols()) {
result = result && (getCols()
== other.getCols());
}
result = result && (hasStartTimestamp() == other.hasStartTimestamp());
if (hasStartTimestamp()) {
result = result && (getStartTimestamp()
== other.getStartTimestamp());
}
result = result && (hasLimitTimestamp() == other.hasLimitTimestamp());
if (hasLimitTimestamp()) {
result = result && (getLimitTimestamp()
== other.getLimitTimestamp());
}
result = result && (hasTimestampStep() == other.hasTimestampStep());
if (hasTimestampStep()) {
result = result && (getTimestampStep()
== other.getTimestampStep());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRows()) {
hash = (37 * hash) + ROWS_FIELD_NUMBER;
hash = (53 * hash) + getRows();
}
if (hasCols()) {
hash = (37 * hash) + COLS_FIELD_NUMBER;
hash = (53 * hash) + getCols();
}
if (hasStartTimestamp()) {
hash = (37 * hash) + START_TIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getStartTimestamp());
}
if (hasLimitTimestamp()) {
hash = (37 * hash) + LIMIT_TIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getLimitTimestamp());
}
if (hasTimestampStep()) {
hash = (37 * hash) + TIMESTAMP_STEP_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getTimestampStep());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(mediapipe.TestCalculators.RandomMatrixCalculatorOptions prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code mediapipe.RandomMatrixCalculatorOptions}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:mediapipe.RandomMatrixCalculatorOptions)
mediapipe.TestCalculators.RandomMatrixCalculatorOptionsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_fieldAccessorTable
.ensureFieldAccessorsInitialized(
mediapipe.TestCalculators.RandomMatrixCalculatorOptions.class, mediapipe.TestCalculators.RandomMatrixCalculatorOptions.Builder.class);
}
// Construct using mediapipe.TestCalculators.RandomMatrixCalculatorOptions.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
rows_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
cols_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
startTimestamp_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
limitTimestamp_ = 0L;
bitField0_ = (bitField0_ & ~0x00000008);
timestampStep_ = 0L;
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return mediapipe.TestCalculators.internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor;
}
public mediapipe.TestCalculators.RandomMatrixCalculatorOptions getDefaultInstanceForType() {
return mediapipe.TestCalculators.RandomMatrixCalculatorOptions.getDefaultInstance();
}
public mediapipe.TestCalculators.RandomMatrixCalculatorOptions build() {
mediapipe.TestCalculators.RandomMatrixCalculatorOptions result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public mediapipe.TestCalculators.RandomMatrixCalculatorOptions buildPartial() {
mediapipe.TestCalculators.RandomMatrixCalculatorOptions result = new mediapipe.TestCalculators.RandomMatrixCalculatorOptions(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.rows_ = rows_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.cols_ = cols_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.startTimestamp_ = startTimestamp_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.limitTimestamp_ = limitTimestamp_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.timestampStep_ = timestampStep_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof mediapipe.TestCalculators.RandomMatrixCalculatorOptions) {
return mergeFrom((mediapipe.TestCalculators.RandomMatrixCalculatorOptions)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(mediapipe.TestCalculators.RandomMatrixCalculatorOptions other) {
if (other == mediapipe.TestCalculators.RandomMatrixCalculatorOptions.getDefaultInstance()) return this;
if (other.hasRows()) {
setRows(other.getRows());
}
if (other.hasCols()) {
setCols(other.getCols());
}
if (other.hasStartTimestamp()) {
setStartTimestamp(other.getStartTimestamp());
}
if (other.hasLimitTimestamp()) {
setLimitTimestamp(other.getLimitTimestamp());
}
if (other.hasTimestampStep()) {
setTimestampStep(other.getTimestampStep());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
mediapipe.TestCalculators.RandomMatrixCalculatorOptions parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (mediapipe.TestCalculators.RandomMatrixCalculatorOptions) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int rows_ ;
/**
* <code>optional int32 rows = 1;</code>
*/
public boolean hasRows() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int32 rows = 1;</code>
*/
public int getRows() {
return rows_;
}
/**
* <code>optional int32 rows = 1;</code>
*/
public Builder setRows(int value) {
bitField0_ |= 0x00000001;
rows_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 rows = 1;</code>
*/
public Builder clearRows() {
bitField0_ = (bitField0_ & ~0x00000001);
rows_ = 0;
onChanged();
return this;
}
private int cols_ ;
/**
* <code>optional int32 cols = 2;</code>
*/
public boolean hasCols() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int32 cols = 2;</code>
*/
public int getCols() {
return cols_;
}
/**
* <code>optional int32 cols = 2;</code>
*/
public Builder setCols(int value) {
bitField0_ |= 0x00000002;
cols_ = value;
onChanged();
return this;
}
/**
* <code>optional int32 cols = 2;</code>
*/
public Builder clearCols() {
bitField0_ = (bitField0_ & ~0x00000002);
cols_ = 0;
onChanged();
return this;
}
private long startTimestamp_ ;
/**
* <code>optional int64 start_timestamp = 3;</code>
*/
public boolean hasStartTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int64 start_timestamp = 3;</code>
*/
public long getStartTimestamp() {
return startTimestamp_;
}
/**
* <code>optional int64 start_timestamp = 3;</code>
*/
public Builder setStartTimestamp(long value) {
bitField0_ |= 0x00000004;
startTimestamp_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 start_timestamp = 3;</code>
*/
public Builder clearStartTimestamp() {
bitField0_ = (bitField0_ & ~0x00000004);
startTimestamp_ = 0L;
onChanged();
return this;
}
private long limitTimestamp_ ;
/**
* <code>optional int64 limit_timestamp = 4;</code>
*/
public boolean hasLimitTimestamp() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional int64 limit_timestamp = 4;</code>
*/
public long getLimitTimestamp() {
return limitTimestamp_;
}
/**
* <code>optional int64 limit_timestamp = 4;</code>
*/
public Builder setLimitTimestamp(long value) {
bitField0_ |= 0x00000008;
limitTimestamp_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 limit_timestamp = 4;</code>
*/
public Builder clearLimitTimestamp() {
bitField0_ = (bitField0_ & ~0x00000008);
limitTimestamp_ = 0L;
onChanged();
return this;
}
private long timestampStep_ ;
/**
* <code>optional int64 timestamp_step = 5;</code>
*/
public boolean hasTimestampStep() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional int64 timestamp_step = 5;</code>
*/
public long getTimestampStep() {
return timestampStep_;
}
/**
* <code>optional int64 timestamp_step = 5;</code>
*/
public Builder setTimestampStep(long value) {
bitField0_ |= 0x00000010;
timestampStep_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 timestamp_step = 5;</code>
*/
public Builder clearTimestampStep() {
bitField0_ = (bitField0_ & ~0x00000010);
timestampStep_ = 0L;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:mediapipe.RandomMatrixCalculatorOptions)
}
// @@protoc_insertion_point(class_scope:mediapipe.RandomMatrixCalculatorOptions)
private static final mediapipe.TestCalculators.RandomMatrixCalculatorOptions DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new mediapipe.TestCalculators.RandomMatrixCalculatorOptions();
}
public static mediapipe.TestCalculators.RandomMatrixCalculatorOptions getDefaultInstance() {
return DEFAULT_INSTANCE;
}
@java.lang.Deprecated public static final com.google.protobuf.Parser<RandomMatrixCalculatorOptions>
PARSER = new com.google.protobuf.AbstractParser<RandomMatrixCalculatorOptions>() {
public RandomMatrixCalculatorOptions parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RandomMatrixCalculatorOptions(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<RandomMatrixCalculatorOptions> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RandomMatrixCalculatorOptions> getParserForType() {
return PARSER;
}
public mediapipe.TestCalculators.RandomMatrixCalculatorOptions getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
public static final int EXT_FIELD_NUMBER = 52056136;
/**
* <code>extend .mediapipe.CalculatorOptions { ... }</code>
*/
public static final
com.google.protobuf.GeneratedMessage.GeneratedExtension<
com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions,
mediapipe.TestCalculators.RandomMatrixCalculatorOptions> ext = com.google.protobuf.GeneratedMessage
.newMessageScopedGeneratedExtension(
mediapipe.TestCalculators.RandomMatrixCalculatorOptions.getDefaultInstance(),
0,
mediapipe.TestCalculators.RandomMatrixCalculatorOptions.class,
mediapipe.TestCalculators.RandomMatrixCalculatorOptions.getDefaultInstance());
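    // Usage sketch (illustrative only, not part of the generated file): these
    // options would typically be attached to a node's CalculatorOptions via
    // the message-scoped extension declared above, e.g.
    //   CalculatorOptions options = CalculatorOptions.newBuilder()
    //       .setExtension(RandomMatrixCalculatorOptions.ext,
    //           RandomMatrixCalculatorOptions.newBuilder()
    //               .setRows(3)
    //               .setCols(4)
    //               .build())
    //       .build();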
}
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor;
private static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_mediapipe_RandomMatrixCalculatorOptions_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
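      // descriptorData below is the serialized FileDescriptorProto for
      // mediapipe/framework/test_calculators.proto, embedded as string
      // literals; it is decoded at class-initialization time to rebuild the
      // message, field, and extension descriptors used above.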
java.lang.String[] descriptorData = {
"\n*mediapipe/framework/test_calculators.p" +
"roto\022\tmediapipe\032$mediapipe/framework/cal" +
"culator.proto\"\335\001\n\035RandomMatrixCalculator" +
"Options\022\014\n\004rows\030\001 \001(\005\022\014\n\004cols\030\002 \001(\005\022\027\n\017s" +
"tart_timestamp\030\003 \001(\003\022\027\n\017limit_timestamp\030" +
"\004 \001(\003\022\026\n\016timestamp_step\030\005 \001(\0032V\n\003ext\022\034.m" +
"ediapipe.CalculatorOptions\030\310\240\351\030 \001(\0132(.me" +
"diapipe.RandomMatrixCalculatorOptions"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.mediapipe.proto.CalculatorProto.getDescriptor(),
}, assigner);
internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_mediapipe_RandomMatrixCalculatorOptions_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_mediapipe_RandomMatrixCalculatorOptions_descriptor,
new java.lang.String[] { "Rows", "Cols", "StartTimestamp", "LimitTimestamp", "TimestampStep", });
com.google.mediapipe.proto.CalculatorProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}

View File

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent">
<FrameLayout
android:id="@+id/preview_display_layout"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
android:layout_weight="1"
tools:ignore="MissingConstraints">
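        <!-- The camera preview surface is presumably attached to this
             container at runtime by the host Activity; until camera
             permission is granted, only the TextView below is visible. -->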
<TextView
android:id="@+id/no_camera_access_view"
            android:layout_height="match_parent"
            android:layout_width="match_parent"
android:gravity="center"
android:text="@string/no_camera_access" />
</FrameLayout>
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#008577</color>
<color name="colorPrimaryDark">#00574B</color>
<color name="colorAccent">#D81B60</color>
</resources>

View File

@@ -0,0 +1,5 @@
<resources>
<string name="app_name">Hand Tracking GPU</string>
<string name="no_camera_access" translatable="false">Please grant camera permissions.</string>
</resources>

View File

@@ -0,0 +1,11 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>

View File

@@ -0,0 +1,29 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.5.1'
classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.6'
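        // (The protobuf Gradle plugin on the classpath above is presumably
        // applied by the app module's build.gradle to compile .proto sources.)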
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}

View File

@@ -0,0 +1,20 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
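# If the build runs out of memory, this value can be raised, for example
# org.gradle.jvmargs=-Xmx4g (an illustrative value; adjust to your machine).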
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true

View File

@@ -0,0 +1,6 @@
#Wed Oct 16 15:26:36 PKT 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip

View File

@@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

View File

@@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@@ -0,0 +1,10 @@
## This file is automatically generated by Android Studio.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file should *NOT* be checked into Version Control Systems,
# as it contains information specific to your local configuration.
#
# Location of the SDK. This is only used by Gradle.
# For customization when using a Version Control System, please read the
# header note.
sdk.dir=/home/ali/Android/Sdk

View File

@@ -0,0 +1,2 @@
include ':app'
rootProject.name='Hand Tracking GPU'