Rename the mediapipe java image container from Image to MPImage.

PiperOrigin-RevId: 482933122
Jiuqiang Tang 2022-10-21 18:15:22 -07:00 committed by Copybara-Service
parent 7196db275e
commit abed54ea30
24 changed files with 273 additions and 267 deletions

View File

@ -17,8 +17,8 @@ package com.google.mediapipe.framework;
import android.graphics.Bitmap;
import com.google.mediapipe.framework.image.BitmapExtractor;
import com.google.mediapipe.framework.image.ByteBufferExtractor;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.ImageProperties;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.framework.image.MPImageProperties;
import java.nio.ByteBuffer;
// TODO: use Preconditions in this file.
@ -60,24 +60,24 @@ public class AndroidPacketCreator extends PacketCreator {
}
/**
* Creates an Image packet from an {@link Image}.
* Creates a MediaPipe Image packet from a {@link MPImage}.
*
* <p>The ImageContainerType must be IMAGE_CONTAINER_BYTEBUFFER or IMAGE_CONTAINER_BITMAP.
*/
public Packet createImage(Image image) {
public Packet createImage(MPImage image) {
// TODO: Choose the best storage from multiple containers.
ImageProperties properties = image.getContainedImageProperties().get(0);
if (properties.getStorageType() == Image.STORAGE_TYPE_BYTEBUFFER) {
MPImageProperties properties = image.getContainedImageProperties().get(0);
if (properties.getStorageType() == MPImage.STORAGE_TYPE_BYTEBUFFER) {
ByteBuffer buffer = ByteBufferExtractor.extract(image);
int numChannels = 0;
switch (properties.getImageFormat()) {
case Image.IMAGE_FORMAT_RGBA:
case MPImage.IMAGE_FORMAT_RGBA:
numChannels = 4;
break;
case Image.IMAGE_FORMAT_RGB:
case MPImage.IMAGE_FORMAT_RGB:
numChannels = 3;
break;
case Image.IMAGE_FORMAT_ALPHA:
case MPImage.IMAGE_FORMAT_ALPHA:
numChannels = 1;
break;
default: // fall out
@ -90,7 +90,7 @@ public class AndroidPacketCreator extends PacketCreator {
int height = image.getHeight();
return createImage(buffer, width, height, numChannels);
}
if (properties.getImageFormat() == Image.STORAGE_TYPE_BITMAP) {
if (properties.getImageFormat() == MPImage.STORAGE_TYPE_BITMAP) {
Bitmap bitmap = BitmapExtractor.extract(image);
if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) {
throw new UnsupportedOperationException("bitmap must use ARGB_8888 config.");
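
A minimal usage sketch of the renamed createImage(MPImage) entry point above, assuming an AndroidPacketCreator obtained from an existing MediaPipe Graph and an ARGB_8888 Bitmap (both supplied by the caller, not shown here):

import android.graphics.Bitmap;
import com.google.mediapipe.framework.AndroidPacketCreator;
import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.MPImage;

final class CreateImagePacketSample {
  // Wraps an ARGB_8888 Bitmap in an MPImage and converts it into a MediaPipe Packet.
  static Packet fromBitmap(AndroidPacketCreator packetCreator, Bitmap bitmap) {
    MPImage image = new BitmapImageBuilder(bitmap).build();
    return packetCreator.createImage(image);
  }
}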

View File

@ -18,29 +18,29 @@ package com.google.mediapipe.framework.image;
import android.graphics.Bitmap;
/**
* Utility for extracting {@link android.graphics.Bitmap} from {@link Image}.
* Utility for extracting {@link android.graphics.Bitmap} from {@link MPImage}.
*
* <p>Currently it only supports {@link Image} with {@link Image#STORAGE_TYPE_BITMAP}, otherwise
* <p>Currently it only supports {@link MPImage} with {@link MPImage#STORAGE_TYPE_BITMAP}, otherwise
* {@link IllegalArgumentException} will be thrown.
*/
public final class BitmapExtractor {
/**
* Extracts a {@link android.graphics.Bitmap} from an {@link Image}.
* Extracts a {@link android.graphics.Bitmap} from a {@link MPImage}.
*
* @param image the image to extract {@link android.graphics.Bitmap} from.
* @return the {@link android.graphics.Bitmap} stored in {@link Image}
* @return the {@link android.graphics.Bitmap} stored in {@link MPImage}
* @throws IllegalArgumentException when the extraction requires unsupported format or data type
* conversions.
*/
public static Bitmap extract(Image image) {
ImageContainer imageContainer = image.getContainer(Image.STORAGE_TYPE_BITMAP);
public static Bitmap extract(MPImage image) {
MPImageContainer imageContainer = image.getContainer(MPImage.STORAGE_TYPE_BITMAP);
if (imageContainer != null) {
return ((BitmapImageContainer) imageContainer).getBitmap();
} else {
// TODO: Support ByteBuffer -> Bitmap conversion.
throw new IllegalArgumentException(
"Extracting Bitmap from an Image created by objects other than Bitmap is not"
"Extracting Bitmap from a MPImage created by objects other than Bitmap is not"
+ " supported");
}
}
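
A short sketch of the renamed extractor; per the documentation above it only works for a Bitmap-backed MPImage and throws IllegalArgumentException otherwise:

import android.graphics.Bitmap;
import com.google.mediapipe.framework.image.BitmapExtractor;
import com.google.mediapipe.framework.image.MPImage;

final class BitmapExtractSample {
  // Returns the Bitmap stored inside a Bitmap-backed MPImage.
  static Bitmap toBitmap(MPImage image) {
    // Throws IllegalArgumentException when the MPImage has no Bitmap container.
    return BitmapExtractor.extract(image);
  }
}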

View File

@ -22,7 +22,7 @@ import android.provider.MediaStore;
import java.io.IOException;
/**
* Builds {@link Image} from {@link android.graphics.Bitmap}.
* Builds {@link MPImage} from {@link android.graphics.Bitmap}.
*
* <p>You can pass in either mutable or immutable {@link android.graphics.Bitmap}. However once
* {@link android.graphics.Bitmap} is passed in, to keep data integrity you shouldn't modify content
@ -49,7 +49,7 @@ public class BitmapImageBuilder {
}
/**
* Creates the builder to build {@link Image} from a file.
* Creates the builder to build {@link MPImage} from a file.
*
* @param context the application context.
* @param uri the path to the resource file.
@ -58,15 +58,15 @@ public class BitmapImageBuilder {
this(MediaStore.Images.Media.getBitmap(context.getContentResolver(), uri));
}
/** Sets value for {@link Image#getTimestamp()}. */
/** Sets value for {@link MPImage#getTimestamp()}. */
BitmapImageBuilder setTimestamp(long timestamp) {
this.timestamp = timestamp;
return this;
}
/** Builds an {@link Image} instance. */
public Image build() {
return new Image(
/** Builds a {@link MPImage} instance. */
public MPImage build() {
return new MPImage(
new BitmapImageContainer(bitmap), timestamp, bitmap.getWidth(), bitmap.getHeight());
}
}
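
A sketch of building an MPImage with the renamed builder. The Bitmap constructor is confirmed by the diff; the (Context, Uri) constructor and its checked IOException are assumptions based on the MediaStore call shown above:

import android.content.Context;
import android.graphics.Bitmap;
import android.net.Uri;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.MPImage;
import java.io.IOException;

final class BitmapImageBuilderSample {
  // Wraps an in-memory Bitmap; no copy is made, so don't mutate the Bitmap afterwards.
  static MPImage fromBitmap(Bitmap bitmap) {
    return new BitmapImageBuilder(bitmap).build();
  }

  // Decodes an image file referenced by a content Uri into a Bitmap-backed MPImage.
  static MPImage fromUri(Context context, Uri uri) throws IOException {
    return new BitmapImageBuilder(context, uri).build();
  }
}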

View File

@ -16,19 +16,19 @@ limitations under the License.
package com.google.mediapipe.framework.image;
import android.graphics.Bitmap;
import com.google.mediapipe.framework.image.Image.ImageFormat;
import com.google.mediapipe.framework.image.MPImage.MPImageFormat;
class BitmapImageContainer implements ImageContainer {
class BitmapImageContainer implements MPImageContainer {
private final Bitmap bitmap;
private final ImageProperties properties;
private final MPImageProperties properties;
public BitmapImageContainer(Bitmap bitmap) {
this.bitmap = bitmap;
this.properties =
ImageProperties.builder()
MPImageProperties.builder()
.setImageFormat(convertFormatCode(bitmap.getConfig()))
.setStorageType(Image.STORAGE_TYPE_BITMAP)
.setStorageType(MPImage.STORAGE_TYPE_BITMAP)
.build();
}
@ -37,7 +37,7 @@ class BitmapImageContainer implements ImageContainer {
}
@Override
public ImageProperties getImageProperties() {
public MPImageProperties getImageProperties() {
return properties;
}
@ -46,15 +46,15 @@ class BitmapImageContainer implements ImageContainer {
bitmap.recycle();
}
@ImageFormat
@MPImageFormat
static int convertFormatCode(Bitmap.Config config) {
switch (config) {
case ALPHA_8:
return Image.IMAGE_FORMAT_ALPHA;
return MPImage.IMAGE_FORMAT_ALPHA;
case ARGB_8888:
return Image.IMAGE_FORMAT_RGBA;
return MPImage.IMAGE_FORMAT_RGBA;
default:
return Image.IMAGE_FORMAT_UNKNOWN;
return MPImage.IMAGE_FORMAT_UNKNOWN;
}
}
}

View File

@ -21,45 +21,45 @@ import android.graphics.Bitmap.Config;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import com.google.auto.value.AutoValue;
import com.google.mediapipe.framework.image.Image.ImageFormat;
import com.google.mediapipe.framework.image.MPImage.MPImageFormat;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Locale;
/**
* Utility for extracting {@link ByteBuffer} from {@link Image}.
* Utility for extracting {@link ByteBuffer} from {@link MPImage}.
*
* <p>Currently it only supports {@link Image} with {@link Image#STORAGE_TYPE_BYTEBUFFER}, otherwise
* {@link IllegalArgumentException} will be thrown.
* <p>Currently it only supports {@link MPImage} with {@link MPImage#STORAGE_TYPE_BYTEBUFFER},
* otherwise {@link IllegalArgumentException} will be thrown.
*/
public class ByteBufferExtractor {
/**
* Extracts a {@link ByteBuffer} from an {@link Image}.
* Extracts a {@link ByteBuffer} from a {@link MPImage}.
*
* <p>The returned {@link ByteBuffer} is a read-only view, with the first available {@link
* ImageProperties} whose storage type is {@code Image.STORAGE_TYPE_BYTEBUFFER}.
* MPImageProperties} whose storage type is {@code MPImage.STORAGE_TYPE_BYTEBUFFER}.
*
* @see Image#getContainedImageProperties()
* @see MPImage#getContainedImageProperties()
* @return A read-only {@link ByteBuffer}.
* @throws IllegalArgumentException when the image doesn't contain a {@link ByteBuffer} storage.
*/
@SuppressLint("SwitchIntDef")
public static ByteBuffer extract(Image image) {
ImageContainer container = image.getContainer();
public static ByteBuffer extract(MPImage image) {
MPImageContainer container = image.getContainer();
switch (container.getImageProperties().getStorageType()) {
case Image.STORAGE_TYPE_BYTEBUFFER:
case MPImage.STORAGE_TYPE_BYTEBUFFER:
ByteBufferImageContainer byteBufferImageContainer = (ByteBufferImageContainer) container;
return byteBufferImageContainer.getByteBuffer().asReadOnlyBuffer();
default:
throw new IllegalArgumentException(
"Extract ByteBuffer from an Image created by objects other than Bytebuffer is not"
"Extract ByteBuffer from a MPImage created by objects other than Bytebuffer is not"
+ " supported");
}
}
/**
* Extracts a readonly {@link ByteBuffer} in given {@code targetFormat} from an {@link Image}.
* Extracts a readonly {@link ByteBuffer} in given {@code targetFormat} from a {@link MPImage}.
*
* <p>Format conversion spec:
*
@ -70,26 +70,26 @@ public class ByteBufferExtractor {
*
* @param image the image to extract buffer from.
* @param targetFormat the image format of the result bytebuffer.
* @return the readonly {@link ByteBuffer} stored in {@link Image}
* @return the readonly {@link ByteBuffer} stored in {@link MPImage}
* @throws IllegalArgumentException when the extraction requires unsupported format or data type
* conversions.
*/
static ByteBuffer extract(Image image, @ImageFormat int targetFormat) {
ImageContainer container;
ImageProperties byteBufferProperties =
ImageProperties.builder()
.setStorageType(Image.STORAGE_TYPE_BYTEBUFFER)
static ByteBuffer extract(MPImage image, @MPImageFormat int targetFormat) {
MPImageContainer container;
MPImageProperties byteBufferProperties =
MPImageProperties.builder()
.setStorageType(MPImage.STORAGE_TYPE_BYTEBUFFER)
.setImageFormat(targetFormat)
.build();
if ((container = image.getContainer(byteBufferProperties)) != null) {
ByteBufferImageContainer byteBufferImageContainer = (ByteBufferImageContainer) container;
return byteBufferImageContainer.getByteBuffer().asReadOnlyBuffer();
} else if ((container = image.getContainer(Image.STORAGE_TYPE_BYTEBUFFER)) != null) {
} else if ((container = image.getContainer(MPImage.STORAGE_TYPE_BYTEBUFFER)) != null) {
ByteBufferImageContainer byteBufferImageContainer = (ByteBufferImageContainer) container;
@ImageFormat int sourceFormat = byteBufferImageContainer.getImageFormat();
@MPImageFormat int sourceFormat = byteBufferImageContainer.getImageFormat();
return convertByteBuffer(byteBufferImageContainer.getByteBuffer(), sourceFormat, targetFormat)
.asReadOnlyBuffer();
} else if ((container = image.getContainer(Image.STORAGE_TYPE_BITMAP)) != null) {
} else if ((container = image.getContainer(MPImage.STORAGE_TYPE_BITMAP)) != null) {
BitmapImageContainer bitmapImageContainer = (BitmapImageContainer) container;
ByteBuffer byteBuffer =
extractByteBufferFromBitmap(bitmapImageContainer.getBitmap(), targetFormat)
@ -98,85 +98,89 @@ public class ByteBufferExtractor {
return byteBuffer;
} else {
throw new IllegalArgumentException(
"Extracting ByteBuffer from an Image created by objects other than Bitmap or"
"Extracting ByteBuffer from a MPImage created by objects other than Bitmap or"
+ " Bytebuffer is not supported");
}
}
/** A wrapper for a {@link ByteBuffer} and its {@link ImageFormat}. */
/** A wrapper for a {@link ByteBuffer} and its {@link MPImageFormat}. */
@AutoValue
abstract static class Result {
/** Gets the {@link ByteBuffer} in the result of {@link ByteBufferExtractor#extract(Image)}. */
/**
* Gets the {@link ByteBuffer} in the result of {@link ByteBufferExtractor#extract(MPImage)}.
*/
public abstract ByteBuffer buffer();
/** Gets the {@link ImageFormat} in the result of {@link ByteBufferExtractor#extract(Image)}. */
@ImageFormat
/**
* Gets the {@link MPImageFormat} in the result of {@link ByteBufferExtractor#extract(MPImage)}.
*/
@MPImageFormat
public abstract int format();
static Result create(ByteBuffer buffer, @ImageFormat int imageFormat) {
static Result create(ByteBuffer buffer, @MPImageFormat int imageFormat) {
return new AutoValue_ByteBufferExtractor_Result(buffer, imageFormat);
}
}
/**
* Extracts a {@link ByteBuffer} in any available {@code imageFormat} from an {@link Image}.
* Extracts a {@link ByteBuffer} in any available {@code imageFormat} from a {@link MPImage}.
*
* <p>It will make the best effort to return an already existed {@link ByteBuffer} to avoid copy.
*
* @return the readonly {@link ByteBuffer} stored in {@link Image}
* @return the readonly {@link ByteBuffer} stored in {@link MPImage}
* @throws IllegalArgumentException when {@code image} doesn't contain {@link ByteBuffer} with
* given {@code imageFormat}
*/
static Result extractInRecommendedFormat(Image image) {
ImageContainer container;
if ((container = image.getContainer(Image.STORAGE_TYPE_BITMAP)) != null) {
static Result extractInRecommendedFormat(MPImage image) {
MPImageContainer container;
if ((container = image.getContainer(MPImage.STORAGE_TYPE_BITMAP)) != null) {
Bitmap bitmap = ((BitmapImageContainer) container).getBitmap();
@ImageFormat int format = adviseImageFormat(bitmap);
@MPImageFormat int format = adviseImageFormat(bitmap);
Result result =
Result.create(extractByteBufferFromBitmap(bitmap, format).asReadOnlyBuffer(), format);
boolean unused =
image.addContainer(new ByteBufferImageContainer(result.buffer(), result.format()));
return result;
} else if ((container = image.getContainer(Image.STORAGE_TYPE_BYTEBUFFER)) != null) {
} else if ((container = image.getContainer(MPImage.STORAGE_TYPE_BYTEBUFFER)) != null) {
ByteBufferImageContainer byteBufferImageContainer = (ByteBufferImageContainer) container;
return Result.create(
byteBufferImageContainer.getByteBuffer().asReadOnlyBuffer(),
byteBufferImageContainer.getImageFormat());
} else {
throw new IllegalArgumentException(
"Extract ByteBuffer from an Image created by objects other than Bitmap or Bytebuffer"
"Extract ByteBuffer from a MPImage created by objects other than Bitmap or Bytebuffer"
+ " is not supported");
}
}
@ImageFormat
@MPImageFormat
private static int adviseImageFormat(Bitmap bitmap) {
if (bitmap.getConfig() == Config.ARGB_8888) {
return Image.IMAGE_FORMAT_RGBA;
return MPImage.IMAGE_FORMAT_RGBA;
} else {
throw new IllegalArgumentException(
String.format(
"Extracting ByteBuffer from an Image created by a Bitmap in config %s is not"
"Extracting ByteBuffer from a MPImage created by a Bitmap in config %s is not"
+ " supported",
bitmap.getConfig()));
}
}
private static ByteBuffer extractByteBufferFromBitmap(
Bitmap bitmap, @ImageFormat int imageFormat) {
Bitmap bitmap, @MPImageFormat int imageFormat) {
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR1 && bitmap.isPremultiplied()) {
throw new IllegalArgumentException(
"Extracting ByteBuffer from an Image created by a premultiplied Bitmap is not"
"Extracting ByteBuffer from a MPImage created by a premultiplied Bitmap is not"
+ " supported");
}
if (bitmap.getConfig() == Config.ARGB_8888) {
if (imageFormat == Image.IMAGE_FORMAT_RGBA) {
if (imageFormat == MPImage.IMAGE_FORMAT_RGBA) {
ByteBuffer buffer = ByteBuffer.allocateDirect(bitmap.getByteCount());
bitmap.copyPixelsToBuffer(buffer);
buffer.rewind();
return buffer;
} else if (imageFormat == Image.IMAGE_FORMAT_RGB) {
} else if (imageFormat == MPImage.IMAGE_FORMAT_RGB) {
// TODO: Try Use RGBA buffer to create RGB buffer which might be faster.
int w = bitmap.getWidth();
int h = bitmap.getHeight();
@ -196,14 +200,14 @@ public class ByteBufferExtractor {
}
throw new IllegalArgumentException(
String.format(
"Extracting ByteBuffer from an Image created by Bitmap and convert from %s to format"
"Extracting ByteBuffer from a MPImage created by Bitmap and convert from %s to format"
+ " %d is not supported",
bitmap.getConfig(), imageFormat));
}
private static ByteBuffer convertByteBuffer(
ByteBuffer source, @ImageFormat int sourceFormat, @ImageFormat int targetFormat) {
if (sourceFormat == Image.IMAGE_FORMAT_RGB && targetFormat == Image.IMAGE_FORMAT_RGBA) {
ByteBuffer source, @MPImageFormat int sourceFormat, @MPImageFormat int targetFormat) {
if (sourceFormat == MPImage.IMAGE_FORMAT_RGB && targetFormat == MPImage.IMAGE_FORMAT_RGBA) {
ByteBuffer target = ByteBuffer.allocateDirect(source.capacity() / 3 * 4);
// Extend the buffer when the target is longer than the source. Use two cursors and sweep the
// array reversely to convert in-place.
@ -221,7 +225,8 @@ public class ByteBufferExtractor {
target.put(array, 0, target.capacity());
target.rewind();
return target;
} else if (sourceFormat == Image.IMAGE_FORMAT_RGBA && targetFormat == Image.IMAGE_FORMAT_RGB) {
} else if (sourceFormat == MPImage.IMAGE_FORMAT_RGBA
&& targetFormat == MPImage.IMAGE_FORMAT_RGB) {
ByteBuffer target = ByteBuffer.allocateDirect(source.capacity() / 4 * 3);
// Shrink the buffer when the target is shorter than the source. Use two cursors and sweep the
// array to convert in-place.
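
A sketch of the public extract(MPImage) path above; the format-converting overload is package-private, so only the read-only view is shown:

import com.google.mediapipe.framework.image.ByteBufferExtractor;
import com.google.mediapipe.framework.image.MPImage;
import java.nio.ByteBuffer;

final class ByteBufferExtractSample {
  // Returns a read-only view of the pixel bytes of a ByteBuffer-backed MPImage.
  static ByteBuffer pixelBytes(MPImage image) {
    // Throws IllegalArgumentException when the MPImage has no ByteBuffer container.
    return ByteBufferExtractor.extract(image);
  }
}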

View File

@ -15,11 +15,11 @@ limitations under the License.
package com.google.mediapipe.framework.image;
import com.google.mediapipe.framework.image.Image.ImageFormat;
import com.google.mediapipe.framework.image.MPImage.MPImageFormat;
import java.nio.ByteBuffer;
/**
* Builds a {@link Image} from a {@link ByteBuffer}.
* Builds a {@link MPImage} from a {@link ByteBuffer}.
*
* <p>You can pass in either mutable or immutable {@link ByteBuffer}. However once {@link
* ByteBuffer} is passed in, to keep data integrity you shouldn't modify content in it.
@ -32,7 +32,7 @@ public class ByteBufferImageBuilder {
private final ByteBuffer buffer;
private final int width;
private final int height;
@ImageFormat private final int imageFormat;
@MPImageFormat private final int imageFormat;
// Optional fields.
private long timestamp;
@ -49,7 +49,7 @@ public class ByteBufferImageBuilder {
* @param imageFormat how the data encode the image.
*/
public ByteBufferImageBuilder(
ByteBuffer byteBuffer, int width, int height, @ImageFormat int imageFormat) {
ByteBuffer byteBuffer, int width, int height, @MPImageFormat int imageFormat) {
this.buffer = byteBuffer;
this.width = width;
this.height = height;
@ -58,14 +58,14 @@ public class ByteBufferImageBuilder {
this.timestamp = 0;
}
/** Sets value for {@link Image#getTimestamp()}. */
/** Sets value for {@link MPImage#getTimestamp()}. */
ByteBufferImageBuilder setTimestamp(long timestamp) {
this.timestamp = timestamp;
return this;
}
/** Builds an {@link Image} instance. */
public Image build() {
return new Image(new ByteBufferImageContainer(buffer, imageFormat), timestamp, width, height);
/** Builds a {@link MPImage} instance. */
public MPImage build() {
return new MPImage(new ByteBufferImageContainer(buffer, imageFormat), timestamp, width, height);
}
}
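
A sketch of the renamed builder for raw pixel data. The constructor signature and the RGBA format constant match the diff above; the tightly packed, row-major buffer layout is an assumption for the example:

import com.google.mediapipe.framework.image.ByteBufferImageBuilder;
import com.google.mediapipe.framework.image.MPImage;
import java.nio.ByteBuffer;

final class ByteBufferImageBuilderSample {
  // Wraps tightly packed RGBA pixel data (4 bytes per pixel) in an MPImage without copying.
  static MPImage fromRgba(ByteBuffer rgbaPixels, int width, int height) {
    return new ByteBufferImageBuilder(rgbaPixels, width, height, MPImage.IMAGE_FORMAT_RGBA)
        .build();
  }
}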

View File

@ -15,21 +15,19 @@ limitations under the License.
package com.google.mediapipe.framework.image;
import com.google.mediapipe.framework.image.Image.ImageFormat;
import com.google.mediapipe.framework.image.MPImage.MPImageFormat;
import java.nio.ByteBuffer;
class ByteBufferImageContainer implements ImageContainer {
class ByteBufferImageContainer implements MPImageContainer {
private final ByteBuffer buffer;
private final ImageProperties properties;
private final MPImageProperties properties;
public ByteBufferImageContainer(
ByteBuffer buffer,
@ImageFormat int imageFormat) {
public ByteBufferImageContainer(ByteBuffer buffer, @MPImageFormat int imageFormat) {
this.buffer = buffer;
this.properties =
ImageProperties.builder()
.setStorageType(Image.STORAGE_TYPE_BYTEBUFFER)
MPImageProperties.builder()
.setStorageType(MPImage.STORAGE_TYPE_BYTEBUFFER)
.setImageFormat(imageFormat)
.build();
}
@ -39,14 +37,12 @@ class ByteBufferImageContainer implements ImageContainer {
}
@Override
public ImageProperties getImageProperties() {
public MPImageProperties getImageProperties() {
return properties;
}
/**
* Returns the image format.
*/
@ImageFormat
/** Returns the image format. */
@MPImageFormat
public int getImageFormat() {
return properties.getImageFormat();
}

View File

@ -29,10 +29,10 @@ import java.util.Map.Entry;
/**
* The wrapper class for image objects.
*
* <p>{@link Image} is designed to be an immutable image container, which could be shared
* <p>{@link MPImage} is designed to be an immutable image container, which could be shared
* cross-platforms.
*
* <p>To construct an {@link Image}, use the provided builders:
* <p>To construct a {@link MPImage}, use the provided builders:
*
* <ul>
* <li>{@link ByteBufferImageBuilder}
@ -40,7 +40,7 @@ import java.util.Map.Entry;
* <li>{@link MediaImageBuilder}
* </ul>
*
* <p>{@link Image} uses reference counting to maintain internal storage. When it is created the
* <p>{@link MPImage} uses reference counting to maintain internal storage. When it is created the
* reference count is 1. Developer can call {@link #close()} to reduce reference count to release
* internal storage earlier, otherwise Java garbage collection will release the storage eventually.
*
@ -53,7 +53,7 @@ import java.util.Map.Entry;
* <li>{@link MediaImageExtractor}
* </ul>
*/
public class Image implements Closeable {
public class MPImage implements Closeable {
/** Specifies the image format of an image. */
@IntDef({
@ -69,7 +69,7 @@ public class Image implements Closeable {
IMAGE_FORMAT_JPEG,
})
@Retention(RetentionPolicy.SOURCE)
public @interface ImageFormat {}
public @interface MPImageFormat {}
public static final int IMAGE_FORMAT_UNKNOWN = 0;
public static final int IMAGE_FORMAT_RGBA = 1;
@ -98,14 +98,14 @@ public class Image implements Closeable {
public static final int STORAGE_TYPE_IMAGE_PROXY = 4;
/**
* Returns a list of supported image properties for this {@link Image}.
* Returns a list of supported image properties for this {@link MPImage}.
*
* <p>Currently {@link Image} only support single storage type so the size of return list will
* <p>Currently {@link MPImage} only support single storage type so the size of return list will
* always be 1.
*
* @see ImageProperties
* @see MPImageProperties
*/
public List<ImageProperties> getContainedImageProperties() {
public List<MPImageProperties> getContainedImageProperties() {
return Collections.singletonList(getContainer().getImageProperties());
}
@ -124,7 +124,7 @@ public class Image implements Closeable {
return height;
}
/** Acquires a reference on this {@link Image}. This will increase the reference count by 1. */
/** Acquires a reference on this {@link MPImage}. This will increase the reference count by 1. */
private synchronized void acquire() {
referenceCount += 1;
}
@ -132,7 +132,7 @@ public class Image implements Closeable {
/**
* Removes a reference that was previously acquired or init.
*
* <p>When {@link Image} is created, it has 1 reference count.
* <p>When {@link MPImage} is created, it has 1 reference count.
*
* <p>When the reference count becomes 0, it will release the resource under the hood.
*/
@ -141,24 +141,24 @@ public class Image implements Closeable {
public synchronized void close() {
referenceCount -= 1;
if (referenceCount == 0) {
for (ImageContainer imageContainer : containerMap.values()) {
for (MPImageContainer imageContainer : containerMap.values()) {
imageContainer.close();
}
}
}
/** Advanced API access for {@link Image}. */
/** Advanced API access for {@link MPImage}. */
static final class Internal {
/**
* Acquires a reference on this {@link Image}. This will increase the reference count by 1.
* Acquires a reference on this {@link MPImage}. This will increase the reference count by 1.
*
* <p>This method is more useful for image consumer to acquire a reference so image resource
* will not be closed accidentally. As image creator, normal developer doesn't need to call this
* method.
*
* <p>The reference count is 1 when {@link Image} is created. Developer can call {@link
* #close()} to indicate it doesn't need this {@link Image} anymore.
* <p>The reference count is 1 when {@link MPImage} is created. Developer can call {@link
* #close()} to indicate it doesn't need this {@link MPImage} anymore.
*
* @see #close()
*/
@ -166,10 +166,10 @@ public class Image implements Closeable {
image.acquire();
}
private final Image image;
private final MPImage image;
// Only Image creates the internal helper.
private Internal(Image image) {
// Only MPImage creates the internal helper.
private Internal(MPImage image) {
this.image = image;
}
}
@ -179,15 +179,15 @@ public class Image implements Closeable {
return new Internal(this);
}
private final Map<ImageProperties, ImageContainer> containerMap;
private final Map<MPImageProperties, MPImageContainer> containerMap;
private final long timestamp;
private final int width;
private final int height;
private int referenceCount;
/** Constructs an {@link Image} with a built container. */
Image(ImageContainer container, long timestamp, int width, int height) {
/** Constructs a {@link MPImage} with a built container. */
MPImage(MPImageContainer container, long timestamp, int width, int height) {
this.containerMap = new HashMap<>();
containerMap.put(container.getImageProperties(), container);
this.timestamp = timestamp;
@ -201,10 +201,10 @@ public class Image implements Closeable {
*
* @return the current container.
*/
ImageContainer getContainer() {
MPImageContainer getContainer() {
// According to the design, in the future we will support multiple containers in one image.
// Currently just return the original container.
// TODO: Cache multiple containers in Image.
// TODO: Cache multiple containers in MPImage.
return containerMap.values().iterator().next();
}
@ -214,8 +214,8 @@ public class Image implements Closeable {
* <p>If there are multiple containers with required {@code storageType}, returns the first one.
*/
@Nullable
ImageContainer getContainer(@StorageType int storageType) {
for (Entry<ImageProperties, ImageContainer> entry : containerMap.entrySet()) {
MPImageContainer getContainer(@StorageType int storageType) {
for (Entry<MPImageProperties, MPImageContainer> entry : containerMap.entrySet()) {
if (entry.getKey().getStorageType() == storageType) {
return entry.getValue();
}
@ -225,13 +225,13 @@ public class Image implements Closeable {
/** Gets container from required {@code imageProperties}. Returns {@code null} if non existed. */
@Nullable
ImageContainer getContainer(ImageProperties imageProperties) {
MPImageContainer getContainer(MPImageProperties imageProperties) {
return containerMap.get(imageProperties);
}
/** Adds a new container if it doesn't exist. Returns {@code true} if it succeeds. */
boolean addContainer(ImageContainer container) {
ImageProperties imageProperties = container.getImageProperties();
boolean addContainer(MPImageContainer container) {
MPImageProperties imageProperties = container.getImageProperties();
if (containerMap.containsKey(imageProperties)) {
return false;
}
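
A brief lifecycle sketch of the renamed container: an MPImage starts with a reference count of 1, and calling close() releases the internal storage early; otherwise garbage collection eventually reclaims it, as documented above.

import android.graphics.Bitmap;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.MPImage;

final class MPImageLifecycleSample {
  // Builds, uses, and explicitly releases an MPImage.
  static void useAndRelease(Bitmap bitmap) {
    MPImage image = new BitmapImageBuilder(bitmap).build();
    // ... pass the image to a packet creator, extractor, or vision task here ...
    image.close(); // drops the initial reference and frees the contained storage
  }
}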

View File

@ -14,14 +14,14 @@ limitations under the License.
==============================================================================*/
package com.google.mediapipe.framework.image;
/** Lightweight abstraction for an object that can receive {@link Image} */
public interface ImageConsumer {
/** Lightweight abstraction for an object that can receive {@link MPImage} */
public interface MPImageConsumer {
/**
* Called when an {@link Image} is available.
* Called when a {@link MPImage} is available.
*
* <p>The argument is only guaranteed to be available until this method returns. if you need to
* extend its life time, acquire it, then release it when done.
*/
void onNewImage(Image image);
void onNewMPImage(MPImage image);
}

View File

@ -16,9 +16,9 @@ limitations under the License.
package com.google.mediapipe.framework.image;
/** Manages internal image data storage. The interface is package-private. */
interface ImageContainer {
interface MPImageContainer {
/** Returns the properties of the contained image. */
ImageProperties getImageProperties();
MPImageProperties getImageProperties();
/** Close the image container and releases the image resource inside. */
void close();

View File

@ -14,9 +14,9 @@ limitations under the License.
==============================================================================*/
package com.google.mediapipe.framework.image;
/** Lightweight abstraction for an object that produce {@link Image} */
public interface ImageProducer {
/** Lightweight abstraction for an object that produce {@link MPImage} */
public interface MPImageProducer {
/** Sets the consumer that receives the {@link Image}. */
void setImageConsumer(ImageConsumer imageConsumer);
/** Sets the consumer that receives the {@link MPImage}. */
void setMPImageConsumer(MPImageConsumer imageConsumer);
}
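
A minimal sketch of the renamed consumer callback; the logging body is illustrative only, and anything kept beyond the callback must be copied or acquired first, per the javadoc above:

import android.util.Log;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.framework.image.MPImageConsumer;

final class FrameSizeLogger implements MPImageConsumer {
  private static final String TAG = "FrameSizeLogger";

  // Called for each frame delivered by an MPImageProducer; the image is only guaranteed
  // valid until this method returns.
  @Override
  public void onNewMPImage(MPImage image) {
    Log.d(TAG, "Received frame: " + image.getWidth() + "x" + image.getHeight());
  }
}

A producer would then be attached with setMPImageConsumer(new FrameSizeLogger()).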

View File

@ -17,25 +17,25 @@ package com.google.mediapipe.framework.image;
import com.google.auto.value.AutoValue;
import com.google.auto.value.extension.memoized.Memoized;
import com.google.mediapipe.framework.image.Image.ImageFormat;
import com.google.mediapipe.framework.image.Image.StorageType;
import com.google.mediapipe.framework.image.MPImage.MPImageFormat;
import com.google.mediapipe.framework.image.MPImage.StorageType;
/** Groups a set of properties to describe how an image is stored. */
@AutoValue
public abstract class ImageProperties {
public abstract class MPImageProperties {
/**
* Gets the pixel format of the image.
*
* @see Image.ImageFormat
* @see MPImage.MPImageFormat
*/
@ImageFormat
@MPImageFormat
public abstract int getImageFormat();
/**
* Gets the storage type of the image.
*
* @see Image.StorageType
* @see MPImage.StorageType
*/
@StorageType
public abstract int getStorageType();
@ -45,36 +45,36 @@ public abstract class ImageProperties {
public abstract int hashCode();
/**
* Creates a builder of {@link ImageProperties}.
* Creates a builder of {@link MPImageProperties}.
*
* @see ImageProperties.Builder
* @see MPImageProperties.Builder
*/
static Builder builder() {
return new AutoValue_ImageProperties.Builder();
return new AutoValue_MPImageProperties.Builder();
}
/** Builds a {@link ImageProperties}. */
/** Builds a {@link MPImageProperties}. */
@AutoValue.Builder
abstract static class Builder {
/**
* Sets the {@link Image.ImageFormat}.
* Sets the {@link MPImage.MPImageFormat}.
*
* @see ImageProperties#getImageFormat
* @see MPImageProperties#getImageFormat
*/
abstract Builder setImageFormat(@ImageFormat int value);
abstract Builder setImageFormat(@MPImageFormat int value);
/**
* Sets the {@link Image.StorageType}.
* Sets the {@link MPImage.StorageType}.
*
* @see ImageProperties#getStorageType
* @see MPImageProperties#getStorageType
*/
abstract Builder setStorageType(@StorageType int value);
/** Builds the {@link ImageProperties}. */
abstract ImageProperties build();
/** Builds the {@link MPImageProperties}. */
abstract MPImageProperties build();
}
// Hide the constructor.
ImageProperties() {}
MPImageProperties() {}
}
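
The MPImageProperties builder is package-private, but its getters and the MPImage storage-type constants are public, so client code can inspect a container as in this sketch:

import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.framework.image.MPImageProperties;

final class StorageInspector {
  // Returns true if the MPImage currently carries a ByteBuffer-backed container.
  static boolean hasByteBufferStorage(MPImage image) {
    for (MPImageProperties properties : image.getContainedImageProperties()) {
      if (properties.getStorageType() == MPImage.STORAGE_TYPE_BYTEBUFFER) {
        return true;
      }
    }
    return false;
  }
}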

View File

@ -15,11 +15,12 @@ limitations under the License.
package com.google.mediapipe.framework.image;
import android.media.Image;
import android.os.Build.VERSION_CODES;
import androidx.annotation.RequiresApi;
/**
* Builds {@link Image} from {@link android.media.Image}.
* Builds {@link MPImage} from {@link android.media.Image}.
*
* <p>Once {@link android.media.Image} is passed in, to keep data integrity you shouldn't modify
* content in it.
@ -30,7 +31,7 @@ import androidx.annotation.RequiresApi;
public class MediaImageBuilder {
// Mandatory fields.
private final android.media.Image mediaImage;
private final Image mediaImage;
// Optional fields.
private long timestamp;
@ -40,20 +41,20 @@ public class MediaImageBuilder {
*
* @param mediaImage image data object.
*/
public MediaImageBuilder(android.media.Image mediaImage) {
public MediaImageBuilder(Image mediaImage) {
this.mediaImage = mediaImage;
this.timestamp = 0;
}
/** Sets value for {@link Image#getTimestamp()}. */
/** Sets value for {@link MPImage#getTimestamp()}. */
MediaImageBuilder setTimestamp(long timestamp) {
this.timestamp = timestamp;
return this;
}
/** Builds an {@link Image} instance. */
public Image build() {
return new Image(
/** Builds a {@link MPImage} instance. */
public MPImage build() {
return new MPImage(
new MediaImageContainer(mediaImage),
timestamp,
mediaImage.getWidth(),

View File

@ -15,33 +15,34 @@ limitations under the License.
package com.google.mediapipe.framework.image;
import android.media.Image;
import android.os.Build;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import androidx.annotation.RequiresApi;
import com.google.mediapipe.framework.image.Image.ImageFormat;
import com.google.mediapipe.framework.image.MPImage.MPImageFormat;
@RequiresApi(VERSION_CODES.KITKAT)
class MediaImageContainer implements ImageContainer {
class MediaImageContainer implements MPImageContainer {
private final android.media.Image mediaImage;
private final ImageProperties properties;
private final Image mediaImage;
private final MPImageProperties properties;
public MediaImageContainer(android.media.Image mediaImage) {
public MediaImageContainer(Image mediaImage) {
this.mediaImage = mediaImage;
this.properties =
ImageProperties.builder()
.setStorageType(Image.STORAGE_TYPE_MEDIA_IMAGE)
MPImageProperties.builder()
.setStorageType(MPImage.STORAGE_TYPE_MEDIA_IMAGE)
.setImageFormat(convertFormatCode(mediaImage.getFormat()))
.build();
}
public android.media.Image getImage() {
public Image getImage() {
return mediaImage;
}
@Override
public ImageProperties getImageProperties() {
public MPImageProperties getImageProperties() {
return properties;
}
@ -50,24 +51,24 @@ class MediaImageContainer implements ImageContainer {
mediaImage.close();
}
@ImageFormat
@MPImageFormat
static int convertFormatCode(int graphicsFormat) {
// We only cover the format mentioned in
// https://developer.android.com/reference/android/media/Image#getFormat()
if (VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (graphicsFormat == android.graphics.ImageFormat.FLEX_RGBA_8888) {
return Image.IMAGE_FORMAT_RGBA;
return MPImage.IMAGE_FORMAT_RGBA;
} else if (graphicsFormat == android.graphics.ImageFormat.FLEX_RGB_888) {
return Image.IMAGE_FORMAT_RGB;
return MPImage.IMAGE_FORMAT_RGB;
}
}
switch (graphicsFormat) {
case android.graphics.ImageFormat.JPEG:
return Image.IMAGE_FORMAT_JPEG;
return MPImage.IMAGE_FORMAT_JPEG;
case android.graphics.ImageFormat.YUV_420_888:
return Image.IMAGE_FORMAT_YUV_420_888;
return MPImage.IMAGE_FORMAT_YUV_420_888;
default:
return Image.IMAGE_FORMAT_UNKNOWN;
return MPImage.IMAGE_FORMAT_UNKNOWN;
}
}
}

View File

@ -15,13 +15,14 @@ limitations under the License.
package com.google.mediapipe.framework.image;
import android.media.Image;
import android.os.Build.VERSION_CODES;
import androidx.annotation.RequiresApi;
/**
* Utility for extracting {@link android.media.Image} from {@link Image}.
* Utility for extracting {@link android.media.Image} from {@link MPImage}.
*
* <p>Currently it only supports {@link Image} with {@link Image#STORAGE_TYPE_MEDIA_IMAGE},
* <p>Currently it only supports {@link MPImage} with {@link MPImage#STORAGE_TYPE_MEDIA_IMAGE},
* otherwise {@link IllegalArgumentException} will be thrown.
*/
@RequiresApi(VERSION_CODES.KITKAT)
@ -30,20 +31,20 @@ public class MediaImageExtractor {
private MediaImageExtractor() {}
/**
* Extracts a {@link android.media.Image} from an {@link Image}. Currently it only works for
* {@link Image} that built from {@link MediaImageBuilder}.
* Extracts a {@link android.media.Image} from a {@link MPImage}. Currently it only works for
* {@link MPImage} that built from {@link MediaImageBuilder}.
*
* @param image the image to extract {@link android.media.Image} from.
* @return {@link android.media.Image} that stored in {@link Image}.
* @return {@link android.media.Image} that stored in {@link MPImage}.
* @throws IllegalArgumentException if the extraction failed.
*/
public static android.media.Image extract(Image image) {
ImageContainer container;
if ((container = image.getContainer(Image.STORAGE_TYPE_MEDIA_IMAGE)) != null) {
public static Image extract(MPImage image) {
MPImageContainer container;
if ((container = image.getContainer(MPImage.STORAGE_TYPE_MEDIA_IMAGE)) != null) {
return ((MediaImageContainer) container).getImage();
}
throw new IllegalArgumentException(
"Extract Media Image from an Image created by objects other than Media Image"
"Extract Media Image from a MPImage created by objects other than Media Image"
+ " is not supported");
}
}
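
A sketch tying the two android.media.Image files above together: wrap a camera frame in an MPImage and pull it back out. The round trip is purely illustrative, and the KITKAT annotation mirrors the requirement on MediaImageExtractor shown above:

import android.media.Image;
import android.os.Build.VERSION_CODES;
import androidx.annotation.RequiresApi;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.framework.image.MediaImageBuilder;
import com.google.mediapipe.framework.image.MediaImageExtractor;

final class MediaImageSample {
  // Wraps an android.media.Image (e.g., from an ImageReader) in an MPImage and extracts it again.
  @RequiresApi(VERSION_CODES.KITKAT)
  static Image roundTrip(Image cameraFrame) {
    MPImage wrapped = new MediaImageBuilder(cameraFrame).build();
    return MediaImageExtractor.extract(wrapped);
  }
}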

View File

@ -30,7 +30,7 @@ import androidx.activity.result.contract.ActivityResultContracts;
import androidx.exifinterface.media.ExifInterface;
// ContentResolver dependency
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.core.BaseOptions;
import com.google.mediapipe.tasks.vision.core.RunningMode;
import com.google.mediapipe.tasks.vision.objectdetector.ObjectDetectionResult;
@ -98,7 +98,7 @@ public class MainActivity extends AppCompatActivity {
Log.e(TAG, "Bitmap rotation error:" + e);
}
if (bitmap != null) {
Image image = new BitmapImageBuilder(bitmap).build();
MPImage image = new BitmapImageBuilder(bitmap).build();
ObjectDetectionResult detectionResult = objectDetector.detect(image);
imageView.setData(image, detectionResult);
runOnUiThread(() -> imageView.update());
@ -144,7 +144,8 @@ public class MainActivity extends AppCompatActivity {
MediaMetadataRetriever.METADATA_KEY_VIDEO_FRAME_COUNT));
long frameIntervalMs = duration / numFrames;
for (int i = 0; i < numFrames; ++i) {
Image image = new BitmapImageBuilder(metaRetriever.getFrameAtIndex(i)).build();
MPImage image =
new BitmapImageBuilder(metaRetriever.getFrameAtIndex(i)).build();
ObjectDetectionResult detectionResult =
objectDetector.detectForVideo(image, frameIntervalMs * i);
// Currently only annotates the detection result on the first video frame and

View File

@ -22,7 +22,7 @@ import android.graphics.Matrix;
import android.graphics.Paint;
import androidx.appcompat.widget.AppCompatImageView;
import com.google.mediapipe.framework.image.BitmapExtractor;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.components.containers.Detection;
import com.google.mediapipe.tasks.vision.objectdetector.ObjectDetectionResult;
@ -40,12 +40,12 @@ public class ObjectDetectionResultImageView extends AppCompatImageView {
}
/**
* Sets an {@link Image} and an {@link ObjectDetectionResult} to render.
* Sets a {@link MPImage} and an {@link ObjectDetectionResult} to render.
*
* @param image an {@link Image} object for annotation.
* @param image a {@link MPImage} object for annotation.
* @param result an {@link ObjectDetectionResult} object that contains the detection result.
*/
public void setData(Image image, ObjectDetectionResult result) {
public void setData(MPImage image, ObjectDetectionResult result) {
if (image == null || result == null) {
return;
}

View File

@ -19,7 +19,7 @@ import com.google.mediapipe.formats.proto.RectProto.NormalizedRect;
import com.google.mediapipe.framework.MediaPipeException;
import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.ProtoUtil;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.core.TaskResult;
import com.google.mediapipe.tasks.core.TaskRunner;
import java.util.HashMap;
@ -77,11 +77,11 @@ public class BaseVisionTaskApi implements AutoCloseable {
* A synchronous method to process single image inputs. The call blocks the current thread until a
* failure status or a successful result is returned.
*
* @param image a MediaPipe {@link Image} object for processing.
* @param image a MediaPipe {@link MPImage} object for processing.
* @throws MediaPipeException if the task is not in the image mode or requires a normalized rect
* input.
*/
protected TaskResult processImageData(Image image) {
protected TaskResult processImageData(MPImage image) {
if (runningMode != RunningMode.IMAGE) {
throw new MediaPipeException(
MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(),
@ -102,13 +102,13 @@ public class BaseVisionTaskApi implements AutoCloseable {
* A synchronous method to process single image inputs. The call blocks the current thread until a
* failure status or a successful result is returned.
*
* @param image a MediaPipe {@link Image} object for processing.
* @param image a MediaPipe {@link MPImage} object for processing.
* @param roi a {@link RectF} defining the region-of-interest to process in the image. Coordinates
* are expected to be specified as normalized values in [0,1].
* @throws MediaPipeException if the task is not in the image mode or doesn't require a normalized
* rect.
*/
protected TaskResult processImageData(Image image, RectF roi) {
protected TaskResult processImageData(MPImage image, RectF roi) {
if (runningMode != RunningMode.IMAGE) {
throw new MediaPipeException(
MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(),
@ -132,12 +132,12 @@ public class BaseVisionTaskApi implements AutoCloseable {
* A synchronous method to process continuous video frames. The call blocks the current thread
* until a failure status or a successful result is returned.
*
* @param image a MediaPipe {@link Image} object for processing.
* @param image a MediaPipe {@link MPImage} object for processing.
* @param timestampMs the corresponding timestamp of the input image in milliseconds.
* @throws MediaPipeException if the task is not in the video mode or requires a normalized rect
* input.
*/
protected TaskResult processVideoData(Image image, long timestampMs) {
protected TaskResult processVideoData(MPImage image, long timestampMs) {
if (runningMode != RunningMode.VIDEO) {
throw new MediaPipeException(
MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(),
@ -158,14 +158,14 @@ public class BaseVisionTaskApi implements AutoCloseable {
* A synchronous method to process continuous video frames. The call blocks the current thread
* until a failure status or a successful result is returned.
*
* @param image a MediaPipe {@link Image} object for processing.
* @param image a MediaPipe {@link MPImage} object for processing.
* @param roi a {@link RectF} defining the region-of-interest to process in the image. Coordinates
* are expected to be specified as normalized values in [0,1].
* @param timestampMs the corresponding timestamp of the input image in milliseconds.
* @throws MediaPipeException if the task is not in the video mode or doesn't require a normalized
* rect.
*/
protected TaskResult processVideoData(Image image, RectF roi, long timestampMs) {
protected TaskResult processVideoData(MPImage image, RectF roi, long timestampMs) {
if (runningMode != RunningMode.VIDEO) {
throw new MediaPipeException(
MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(),
@ -189,12 +189,12 @@ public class BaseVisionTaskApi implements AutoCloseable {
* An asynchronous method to send live stream data to the {@link TaskRunner}. The results will be
* available in the user-defined result listener.
*
* @param image a MediaPipe {@link Image} object for processing.
* @param image a MediaPipe {@link MPImage} object for processing.
* @param timestampMs the corresponding timestamp of the input image in milliseconds.
* @throws MediaPipeException if the task is not in the video mode or requires a normalized rect
* input.
*/
protected void sendLiveStreamData(Image image, long timestampMs) {
protected void sendLiveStreamData(MPImage image, long timestampMs) {
if (runningMode != RunningMode.LIVE_STREAM) {
throw new MediaPipeException(
MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(),
@ -215,14 +215,14 @@ public class BaseVisionTaskApi implements AutoCloseable {
* An asynchronous method to send live stream data to the {@link TaskRunner}. The results will be
* available in the user-defined result listener.
*
* @param image a MediaPipe {@link Image} object for processing.
* @param image a MediaPipe {@link MPImage} object for processing.
* @param roi a {@link RectF} defining the region-of-interest to process in the image. Coordinates
* are expected to be specified as normalized values in [0,1].
* @param timestampMs the corresponding timestamp of the input image in milliseconds.
* @throws MediaPipeException if the task is not in the video mode or doesn't require a normalized
* rect.
*/
protected void sendLiveStreamData(Image image, RectF roi, long timestampMs) {
protected void sendLiveStreamData(MPImage image, RectF roi, long timestampMs) {
if (runningMode != RunningMode.LIVE_STREAM) {
throw new MediaPipeException(
MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(),

View File

@ -26,7 +26,7 @@ import com.google.mediapipe.framework.AndroidPacketGetter;
import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.PacketGetter;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.components.processors.proto.ClassifierOptionsProto;
import com.google.mediapipe.tasks.core.BaseOptions;
import com.google.mediapipe.tasks.core.ErrorListener;
@ -59,7 +59,7 @@ import java.util.Optional;
* Model Maker. See <TODO link to the DevSite documentation page>.
*
* <ul>
* <li>Input image {@link Image}
* <li>Input image {@link MPImage}
* <ul>
* <li>The image that gesture recognition runs on.
* </ul>
@ -151,9 +151,9 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
public static GestureRecognizer createFromOptions(
Context context, GestureRecognizerOptions recognizerOptions) {
// TODO: Consolidate OutputHandler and TaskRunner.
OutputHandler<GestureRecognitionResult, Image> handler = new OutputHandler<>();
OutputHandler<GestureRecognitionResult, MPImage> handler = new OutputHandler<>();
handler.setOutputPacketConverter(
new OutputHandler.OutputPacketConverter<GestureRecognitionResult, Image>() {
new OutputHandler.OutputPacketConverter<GestureRecognitionResult, MPImage>() {
@Override
public GestureRecognitionResult convertToTaskResult(List<Packet> packets) {
// If there is no hands detected in the image, just returns empty lists.
@ -178,7 +178,7 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
}
@Override
public Image convertToTaskInput(List<Packet> packets) {
public MPImage convertToTaskInput(List<Packet> packets) {
return new BitmapImageBuilder(
AndroidPacketGetter.getBitmapFromRgb(packets.get(IMAGE_OUT_STREAM_INDEX)))
.build();
@ -222,10 +222,10 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @throws MediaPipeException if there is an internal error.
*/
public GestureRecognitionResult recognize(Image inputImage) {
public GestureRecognitionResult recognize(MPImage inputImage) {
// TODO: add proper support for rotations.
return (GestureRecognitionResult) processImageData(inputImage, buildFullImageRectF());
}
@ -243,11 +243,11 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @param inputTimestampMs the input timestamp (in milliseconds).
* @throws MediaPipeException if there is an internal error.
*/
public GestureRecognitionResult recognizeForVideo(Image inputImage, long inputTimestampMs) {
public GestureRecognitionResult recognizeForVideo(MPImage inputImage, long inputTimestampMs) {
// TODO: add proper support for rotations.
return (GestureRecognitionResult)
processVideoData(inputImage, buildFullImageRectF(), inputTimestampMs);
@ -267,11 +267,11 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @param inputTimestampMs the input timestamp (in milliseconds).
* @throws MediaPipeException if there is an internal error.
*/
public void recognizeAsync(Image inputImage, long inputTimestampMs) {
public void recognizeAsync(MPImage inputImage, long inputTimestampMs) {
// TODO: add proper support for rotations.
sendLiveStreamData(inputImage, buildFullImageRectF(), inputTimestampMs);
}
@ -333,7 +333,7 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
* recognizer is in the live stream mode.
*/
public abstract Builder setResultListener(
ResultListener<GestureRecognitionResult, Image> value);
ResultListener<GestureRecognitionResult, MPImage> value);
/** Sets an optional error listener. */
public abstract Builder setErrorListener(ErrorListener value);
@ -386,7 +386,7 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
// TODO update gesture confidence options after score merging calculator is ready.
abstract Optional<Float> minGestureConfidence();
abstract Optional<ResultListener<GestureRecognitionResult, Image>> resultListener();
abstract Optional<ResultListener<GestureRecognitionResult, MPImage>> resultListener();
abstract Optional<ErrorListener> errorListener();
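
For the task APIs, the caller-visible change is the input type. A minimal sketch, assuming a GestureRecognizer already created via createFromOptions (options construction not shown) and assuming the package path of the task classes:

import android.graphics.Bitmap;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.MPImage;
// Package path assumed for the vision task classes below.
import com.google.mediapipe.tasks.vision.gesturerecognizer.GestureRecognitionResult;
import com.google.mediapipe.tasks.vision.gesturerecognizer.GestureRecognizer;

final class GestureRecognizerSample {
  // Runs single-image gesture recognition on one ARGB_8888 frame.
  static GestureRecognitionResult recognizeFrame(GestureRecognizer recognizer, Bitmap frame) {
    MPImage image = new BitmapImageBuilder(frame).build();
    return recognizer.recognize(image);
  }
}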

View File

@ -25,7 +25,7 @@ import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.PacketGetter;
import com.google.mediapipe.framework.ProtoUtil;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.components.container.proto.ClassificationsProto;
import com.google.mediapipe.tasks.components.processors.ClassifierOptions;
import com.google.mediapipe.tasks.core.BaseOptions;
@ -164,9 +164,9 @@ public final class ImageClassifier extends BaseVisionTaskApi {
* @throws MediaPipeException if there is an error during {@link ImageClassifier} creation.
*/
public static ImageClassifier createFromOptions(Context context, ImageClassifierOptions options) {
OutputHandler<ImageClassificationResult, Image> handler = new OutputHandler<>();
OutputHandler<ImageClassificationResult, MPImage> handler = new OutputHandler<>();
handler.setOutputPacketConverter(
new OutputHandler.OutputPacketConverter<ImageClassificationResult, Image>() {
new OutputHandler.OutputPacketConverter<ImageClassificationResult, MPImage>() {
@Override
public ImageClassificationResult convertToTaskResult(List<Packet> packets) {
try {
@ -182,7 +182,7 @@ public final class ImageClassifier extends BaseVisionTaskApi {
}
@Override
public Image convertToTaskInput(List<Packet> packets) {
public MPImage convertToTaskInput(List<Packet> packets) {
return new BitmapImageBuilder(
AndroidPacketGetter.getBitmapFromRgb(packets.get(IMAGE_OUT_STREAM_INDEX)))
.build();
@ -225,10 +225,10 @@ public final class ImageClassifier extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @throws MediaPipeException if there is an internal error.
*/
public ImageClassificationResult classify(Image inputImage) {
public ImageClassificationResult classify(MPImage inputImage) {
return (ImageClassificationResult) processImageData(inputImage, buildFullImageRectF());
}
@ -242,12 +242,12 @@ public final class ImageClassifier extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @param roi a {@link RectF} specifying the region of interest on which to perform
* classification. Coordinates are expected to be specified as normalized values in [0,1].
* @throws MediaPipeException if there is an internal error.
*/
public ImageClassificationResult classify(Image inputImage, RectF roi) {
public ImageClassificationResult classify(MPImage inputImage, RectF roi) {
return (ImageClassificationResult) processImageData(inputImage, roi);
}
@ -264,11 +264,11 @@ public final class ImageClassifier extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @param inputTimestampMs the input timestamp (in milliseconds).
* @throws MediaPipeException if there is an internal error.
*/
public ImageClassificationResult classifyForVideo(Image inputImage, long inputTimestampMs) {
public ImageClassificationResult classifyForVideo(MPImage inputImage, long inputTimestampMs) {
return (ImageClassificationResult)
processVideoData(inputImage, buildFullImageRectF(), inputTimestampMs);
}
@ -286,14 +286,14 @@ public final class ImageClassifier extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @param roi a {@link RectF} specifying the region of interest on which to perform
* classification. Coordinates are expected to be specified as normalized values in [0,1].
* @param inputTimestampMs the input timestamp (in milliseconds).
* @throws MediaPipeException if there is an internal error.
*/
public ImageClassificationResult classifyForVideo(
Image inputImage, RectF roi, long inputTimestampMs) {
MPImage inputImage, RectF roi, long inputTimestampMs) {
return (ImageClassificationResult) processVideoData(inputImage, roi, inputTimestampMs);
}
@ -311,11 +311,11 @@ public final class ImageClassifier extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @param inputTimestampMs the input timestamp (in milliseconds).
* @throws MediaPipeException if there is an internal error.
*/
public void classifyAsync(Image inputImage, long inputTimestampMs) {
public void classifyAsync(MPImage inputImage, long inputTimestampMs) {
sendLiveStreamData(inputImage, buildFullImageRectF(), inputTimestampMs);
}
@ -334,13 +334,13 @@ public final class ImageClassifier extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @param roi a {@link RectF} specifying the region of interest on which to perform
* classification. Coordinates are expected to be specified as normalized values in [0,1].
* @param inputTimestampMs the input timestamp (in milliseconds).
* @throws MediaPipeException if there is an internal error.
*/
public void classifyAsync(Image inputImage, RectF roi, long inputTimestampMs) {
public void classifyAsync(MPImage inputImage, RectF roi, long inputTimestampMs) {
sendLiveStreamData(inputImage, roi, inputTimestampMs);
}
@ -379,7 +379,7 @@ public final class ImageClassifier extends BaseVisionTaskApi {
* the image classifier is in the live stream mode.
*/
public abstract Builder setResultListener(
ResultListener<ImageClassificationResult, Image> resultListener);
ResultListener<ImageClassificationResult, MPImage> resultListener);
/** Sets an optional {@link ErrorListener}. */
public abstract Builder setErrorListener(ErrorListener errorListener);
@ -416,7 +416,7 @@ public final class ImageClassifier extends BaseVisionTaskApi {
abstract Optional<ClassifierOptions> classifierOptions();
abstract Optional<ResultListener<ImageClassificationResult, Image>> resultListener();
abstract Optional<ResultListener<ImageClassificationResult, MPImage>> resultListener();
abstract Optional<ErrorListener> errorListener();
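
Similarly for ImageClassifier, a sketch of the region-of-interest overload classify(MPImage, RectF) above; the classifier is assumed to be created elsewhere, and the import package path is an assumption:

import android.graphics.Bitmap;
import android.graphics.RectF;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.MPImage;
// Package path assumed for the vision task classes below.
import com.google.mediapipe.tasks.vision.imageclassifier.ImageClassificationResult;
import com.google.mediapipe.tasks.vision.imageclassifier.ImageClassifier;

final class ImageClassifierSample {
  // Classifies the center quarter of the frame; ROI coordinates are normalized to [0,1].
  static ImageClassificationResult classifyCenter(ImageClassifier classifier, Bitmap frame) {
    MPImage image = new BitmapImageBuilder(frame).build();
    RectF centerRoi = new RectF(0.25f, 0.25f, 0.75f, 0.75f);
    return classifier.classify(image, centerRoi);
  }
}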

View File

@ -22,7 +22,7 @@ import com.google.mediapipe.framework.AndroidPacketGetter;
import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.PacketGetter;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.core.BaseOptions;
import com.google.mediapipe.tasks.core.ErrorListener;
import com.google.mediapipe.tasks.core.OutputHandler;
@ -162,9 +162,9 @@ public final class ObjectDetector extends BaseVisionTaskApi {
public static ObjectDetector createFromOptions(
Context context, ObjectDetectorOptions detectorOptions) {
// TODO: Consolidate OutputHandler and TaskRunner.
OutputHandler<ObjectDetectionResult, Image> handler = new OutputHandler<>();
OutputHandler<ObjectDetectionResult, MPImage> handler = new OutputHandler<>();
handler.setOutputPacketConverter(
new OutputHandler.OutputPacketConverter<ObjectDetectionResult, Image>() {
new OutputHandler.OutputPacketConverter<ObjectDetectionResult, MPImage>() {
@Override
public ObjectDetectionResult convertToTaskResult(List<Packet> packets) {
return ObjectDetectionResult.create(
@ -174,7 +174,7 @@ public final class ObjectDetector extends BaseVisionTaskApi {
}
@Override
public Image convertToTaskInput(List<Packet> packets) {
public MPImage convertToTaskInput(List<Packet> packets) {
return new BitmapImageBuilder(
AndroidPacketGetter.getBitmapFromRgb(packets.get(IMAGE_OUT_STREAM_INDEX)))
.build();
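
The converter above is also the pattern callers use to hand a Bitmap to the task after the rename: wrap it in the new container via BitmapImageBuilder. A small sketch; the bitmap source is illustrative.

Bitmap bitmap = BitmapFactory.decodeStream(inputStream); // any ARGB_8888 bitmap source
MPImage mpImage = new BitmapImageBuilder(bitmap).build();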
@ -217,10 +217,10 @@ public final class ObjectDetector extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @throws MediaPipeException if there is an internal error.
*/
public ObjectDetectionResult detect(Image inputImage) {
public ObjectDetectionResult detect(MPImage inputImage) {
return (ObjectDetectionResult) processImageData(inputImage);
}
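
A minimal sketch of the synchronous path, assuming a detector created from ObjectDetectorOptions in the default image running mode:

MPImage input = new BitmapImageBuilder(bitmap).build(); // 'bitmap' is an assumed ARGB_8888 Bitmap
ObjectDetectionResult result = detector.detect(input);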
@ -237,11 +237,11 @@ public final class ObjectDetector extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @param inputTimestampMs the input timestamp (in milliseconds).
* @throws MediaPipeException if there is an internal error.
*/
public ObjectDetectionResult detectForVideo(Image inputImage, long inputTimestampMs) {
public ObjectDetectionResult detectForVideo(MPImage inputImage, long inputTimestampMs) {
return (ObjectDetectionResult) processVideoData(inputImage, inputTimestampMs);
}
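
For video, the same call takes a per-frame timestamp that must increase monotonically. Sketch only; decodeFrame() and the ~30 fps spacing are illustrative assumptions:

for (int i = 0; i < frameCount; i++) {
  MPImage frame = new BitmapImageBuilder(decodeFrame(i)).build(); // decodeFrame() is hypothetical
  ObjectDetectionResult result = detector.detectForVideo(frame, /* inputTimestampMs= */ i * 33L);
}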
@ -259,11 +259,11 @@ public final class ObjectDetector extends BaseVisionTaskApi {
* <li>{@link Bitmap.Config.ARGB_8888}
* </ul>
*
* @param inputImage a MediaPipe {@link Image} object for processing.
* @param inputImage a MediaPipe {@link MPImage} object for processing.
* @param inputTimestampMs the input timestamp (in milliseconds).
* @throws MediaPipeException if there is an internal error.
*/
public void detectAsync(Image inputImage, long inputTimestampMs) {
public void detectAsync(MPImage inputImage, long inputTimestampMs) {
sendLiveStreamData(inputImage, inputTimestampMs);
}
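
In live-stream mode nothing is returned directly; results are posted to the ResultListener configured below. Minimal sketch, assuming a detector built for live-stream use:

// Call once per camera frame with a monotonically increasing timestamp.
detector.detectAsync(frame, frameTimestampMs);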
@ -333,7 +333,8 @@ public final class ObjectDetector extends BaseVisionTaskApi {
* Sets the {@link ResultListener} to receive the detection results asynchronously when the
* object detector is in the live stream mode.
*/
public abstract Builder setResultListener(ResultListener<ObjectDetectionResult, Image> value);
public abstract Builder setResultListener(
ResultListener<ObjectDetectionResult, MPImage> value);
/** Sets an optional {@link ErrorListener}. */
public abstract Builder setErrorListener(ErrorListener value);
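
And the corresponding option wiring with the renamed type parameter. The model path and `context` are placeholders, and the running-mode and error-listener usage are assumed from the surrounding MediaPipe Tasks API:

ObjectDetectorOptions options =
    ObjectDetectorOptions.builder()
        .setBaseOptions(BaseOptions.builder().setModelAssetPath("detector.tflite").build())
        .setRunningMode(RunningMode.LIVE_STREAM)
        .setResultListener(
            (ObjectDetectionResult result, MPImage input) -> {
              // Delivered asynchronously for each detectAsync() call.
            })
        .build();
ObjectDetector detector = ObjectDetector.createFromOptions(context, options);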
@ -378,7 +379,7 @@ public final class ObjectDetector extends BaseVisionTaskApi {
abstract List<String> categoryDenylist();
abstract Optional<ResultListener<ObjectDetectionResult, Image>> resultListener();
abstract Optional<ResultListener<ObjectDetectionResult, MPImage>> resultListener();
abstract Optional<ErrorListener> errorListener();

View File

@ -25,7 +25,7 @@ import com.google.common.truth.Correspondence;
import com.google.mediapipe.formats.proto.ClassificationProto;
import com.google.mediapipe.framework.MediaPipeException;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.components.containers.Category;
import com.google.mediapipe.tasks.components.containers.Landmark;
import com.google.mediapipe.tasks.components.containers.proto.LandmarksDetectionResultProto.LandmarksDetectionResult;
@ -357,7 +357,7 @@ public class GestureRecognizerTest {
@Test
public void recognize_failsWithOutOfOrderInputTimestamps() throws Exception {
Image image = getImageFromAsset(THUMB_UP_IMAGE);
MPImage image = getImageFromAsset(THUMB_UP_IMAGE);
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL, THUMB_UP_INDEX);
GestureRecognizerOptions options =
@ -391,7 +391,7 @@ public class GestureRecognizerTest {
@Test
public void recognize_successWithLiveSteamMode() throws Exception {
Image image = getImageFromAsset(THUMB_UP_IMAGE);
MPImage image = getImageFromAsset(THUMB_UP_IMAGE);
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL, THUMB_UP_INDEX);
GestureRecognizerOptions options =
@ -420,7 +420,7 @@ public class GestureRecognizerTest {
}
}
private static Image getImageFromAsset(String filePath) throws Exception {
private static MPImage getImageFromAsset(String filePath) throws Exception {
AssetManager assetManager = ApplicationProvider.getApplicationContext().getAssets();
InputStream istr = assetManager.open(filePath);
return new BitmapImageBuilder(BitmapFactory.decodeStream(istr)).build();
@ -487,7 +487,7 @@ public class GestureRecognizerTest {
assertThat(expectedGesture.categoryName()).isEqualTo(expectedGesture.categoryName());
}
private static void assertImageSizeIsExpected(Image inputImage) {
private static void assertImageSizeIsExpected(MPImage inputImage) {
assertThat(inputImage).isNotNull();
assertThat(inputImage.getWidth()).isEqualTo(IMAGE_WIDTH);
assertThat(inputImage.getHeight()).isEqualTo(IMAGE_HEIGHT);

View File

@ -24,7 +24,7 @@ import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.mediapipe.framework.MediaPipeException;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.components.containers.Category;
import com.google.mediapipe.tasks.components.processors.ClassifierOptions;
import com.google.mediapipe.tasks.core.BaseOptions;
@ -342,7 +342,7 @@ public class ImageClassifierTest {
@Test
public void classify_succeedsWithVideoMode() throws Exception {
Image image = getImageFromAsset(BURGER_IMAGE);
MPImage image = getImageFromAsset(BURGER_IMAGE);
ImageClassifierOptions options =
ImageClassifierOptions.builder()
.setBaseOptions(BaseOptions.builder().setModelAssetPath(FLOAT_MODEL_FILE).build())
@ -361,7 +361,7 @@ public class ImageClassifierTest {
@Test
public void classify_failsWithOutOfOrderInputTimestamps() throws Exception {
Image image = getImageFromAsset(BURGER_IMAGE);
MPImage image = getImageFromAsset(BURGER_IMAGE);
ImageClassifierOptions options =
ImageClassifierOptions.builder()
.setBaseOptions(BaseOptions.builder().setModelAssetPath(FLOAT_MODEL_FILE).build())
@ -388,7 +388,7 @@ public class ImageClassifierTest {
@Test
public void classify_succeedsWithLiveStreamMode() throws Exception {
Image image = getImageFromAsset(BURGER_IMAGE);
MPImage image = getImageFromAsset(BURGER_IMAGE);
ImageClassifierOptions options =
ImageClassifierOptions.builder()
.setBaseOptions(BaseOptions.builder().setModelAssetPath(FLOAT_MODEL_FILE).build())
@ -411,7 +411,7 @@ public class ImageClassifierTest {
}
}
private static Image getImageFromAsset(String filePath) throws Exception {
private static MPImage getImageFromAsset(String filePath) throws Exception {
AssetManager assetManager = ApplicationProvider.getApplicationContext().getAssets();
InputStream istr = assetManager.open(filePath);
return new BitmapImageBuilder(BitmapFactory.decodeStream(istr)).build();
@ -437,7 +437,7 @@ public class ImageClassifierTest {
}
}
private static void assertImageSizeIsExpected(Image inputImage) {
private static void assertImageSizeIsExpected(MPImage inputImage) {
assertThat(inputImage).isNotNull();
assertThat(inputImage.getWidth()).isEqualTo(480);
assertThat(inputImage.getHeight()).isEqualTo(325);

View File

@ -24,7 +24,7 @@ import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.mediapipe.framework.MediaPipeException;
import com.google.mediapipe.framework.image.BitmapImageBuilder;
import com.google.mediapipe.framework.image.Image;
import com.google.mediapipe.framework.image.MPImage;
import com.google.mediapipe.tasks.components.containers.Category;
import com.google.mediapipe.tasks.components.containers.Detection;
import com.google.mediapipe.tasks.core.BaseOptions;
@ -370,7 +370,7 @@ public class ObjectDetectorTest {
@Test
public void detect_failsWithOutOfOrderInputTimestamps() throws Exception {
Image image = getImageFromAsset(CAT_AND_DOG_IMAGE);
MPImage image = getImageFromAsset(CAT_AND_DOG_IMAGE);
ObjectDetectorOptions options =
ObjectDetectorOptions.builder()
.setBaseOptions(BaseOptions.builder().setModelAssetPath(MODEL_FILE).build())
@ -395,7 +395,7 @@ public class ObjectDetectorTest {
@Test
public void detect_successWithLiveSteamMode() throws Exception {
Image image = getImageFromAsset(CAT_AND_DOG_IMAGE);
MPImage image = getImageFromAsset(CAT_AND_DOG_IMAGE);
ObjectDetectorOptions options =
ObjectDetectorOptions.builder()
.setBaseOptions(BaseOptions.builder().setModelAssetPath(MODEL_FILE).build())
@ -416,7 +416,7 @@ public class ObjectDetectorTest {
}
}
private static Image getImageFromAsset(String filePath) throws Exception {
private static MPImage getImageFromAsset(String filePath) throws Exception {
AssetManager assetManager = ApplicationProvider.getApplicationContext().getAssets();
InputStream istr = assetManager.open(filePath);
return new BitmapImageBuilder(BitmapFactory.decodeStream(istr)).build();
@ -448,7 +448,7 @@ public class ObjectDetectorTest {
assertThat(boundingBox1.bottom).isWithin(PIXEL_DIFF_TOLERANCE).of(boundingBox2.bottom);
}
private static void assertImageSizeIsExpected(Image inputImage) {
private static void assertImageSizeIsExpected(MPImage inputImage) {
assertThat(inputImage).isNotNull();
assertThat(inputImage.getWidth()).isEqualTo(IMAGE_WIDTH);
assertThat(inputImage.getHeight()).isEqualTo(IMAGE_HEIGHT);