Added methods to create iOS MPImage with source type UIImage from a C++ image.

Prianka Liz Kariat 2023-09-28 21:37:03 +05:30
parent a577dc3043
commit 8ea805b6f0
3 changed files with 195 additions and 85 deletions


@@ -14,6 +14,7 @@ objc_library(
     deps = [
         "//mediapipe/framework/formats:image_format_cc_proto",
         "//mediapipe/framework/formats:image_frame",
+        "//mediapipe/framework/formats:image",
        "//mediapipe/tasks/ios/common:MPPCommon",
        "//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
        "//mediapipe/tasks/ios/vision/core:MPPImage",


@@ -14,7 +14,9 @@
 #import <Foundation/Foundation.h>
+#include "mediapipe/framework/formats/image.h"
 #include "mediapipe/framework/formats/image_frame.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"

 NS_ASSUME_NONNULL_BEGIN
@@ -25,9 +27,9 @@ NS_ASSUME_NONNULL_BEGIN
 @interface MPPImage (Utils)

 /**
  * Converts the `MPPImage` into a `mediapipe::ImageFrame`.
- * Irrespective of whether the underlying buffer is grayscale, RGB, RGBA, BGRA etc., the MPPImage is
- * converted to an RGB format. In case of grayscale images, the mono channel is duplicated in the R,
- * G, B channels.
+ * Irrespective of whether the underlying buffer is grayscale, RGBA, BGRA etc., the `MPPImage` is
+ * converted to an RGBA format. In case of grayscale images, the mono channel is duplicated in the
+ * R, G, B channels.
  *
  * @param error Pointer to the memory location where errors if any should be saved. If @c NULL, no
  * error will be saved.
@@ -36,6 +38,32 @@
  */
 - (std::unique_ptr<mediapipe::ImageFrame>)imageFrameWithError:(NSError **)error;

+/**
+ * Initializes an `MPPImage` object with the pixels from the given `mediapipe::Image` and the
+ * source type and orientation of the given source image.
+ *
+ * Only supports initialization from a `mediapipe::Image` of format RGBA.
+ * If `shouldCopyPixelData` is set to `YES`, the newly created `MPPImage` stores a deep copy of
+ * the pixel data of the given `image`. Since deep copies are expensive, it is recommended not to
+ * set `shouldCopyPixelData` unless the `MPPImage` must outlive the passed-in `image`.
+ *
+ * @param image The `mediapipe::Image` whose pixels are used for creating the `MPPImage`.
+ * @param sourceImage The `MPPImage` whose `orientation` and `imageSourceType` are used for
+ * creating the new `MPPImage`.
+ * @param shouldCopyPixelData `BOOL` that determines whether the newly created `MPPImage` stores a
+ * deep copy of the pixel data of the given `image`.
+ *
+ * @param error Pointer to the memory location where errors if any should be saved. If @c NULL, no
+ * error will be saved.
+ *
+ * @return A new `MPPImage` instance with the pixels from the given `mediapipe::Image` and
+ * metadata equal to those of the `sourceImage`. `nil` if there is an error in initializing the
+ * `MPPImage`.
+ */
+- (nullable instancetype)initWithCppImage:(mediapipe::Image &)image
+           cloningPropertiesOfSourceImage:(MPPImage *)sourceImage
+                      shouldCopyPixelData:(BOOL)shouldCopyPixelData
+                                    error:(NSError **)error;
+
 @end

 NS_ASSUME_NONNULL_END
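
To make the intent of the new initializer concrete, here is a minimal, hypothetical usage sketch that is not part of this commit. It assumes a C++ `mediapipe::Image` produced by a task and an `MPPImage` originally created from a `UIImage`; the wrapper function name and the utils-header import path are assumptions for illustration only.

    #import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
    #import "mediapipe/tasks/ios/vision/core/utils/sources/MPPImage+Utils.h"  // Assumed path.

    // Wraps a C++ output image as an MPPImage that mirrors the source image's
    // orientation and image source type (MPPImageSourceTypeImage here).
    static MPPImage *_Nullable MakeResultImage(mediapipe::Image &cppImage, MPPImage *sourceImage) {
      NSError *error = nil;
      // Copy the pixel data so the returned MPPImage can safely outlive `cppImage`.
      return [[MPPImage alloc] initWithCppImage:cppImage
                 cloningPropertiesOfSourceImage:sourceImage
                            shouldCopyPixelData:YES
                                          error:&error];
    }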


@@ -25,12 +25,22 @@
 #include "mediapipe/framework/formats/image_format.pb.h"

 namespace {
+using ::mediapipe::Image;
+using ::mediapipe::ImageFormat;
 using ::mediapipe::ImageFrame;
+
+vImage_Buffer allocatedVImageBuffer(vImagePixelCount width, vImagePixelCount height,
+                                    size_t rowBytes) {
+  UInt8 *data = new UInt8[height * rowBytes];
+  return {.data = data, .width = width, .height = height, .rowBytes = rowBytes};
+}
 }  // namespace

 @interface MPPPixelDataUtils : NSObject

-+ (uint8_t *)rgbPixelDataFromPixelData:(uint8_t *)pixelData
++ (std::unique_ptr<ImageFrame>)imageFrameFromPixelData:(uint8_t *)pixelData
                              withWidth:(size_t)width
                                 height:(size_t)height
                                 stride:(size_t)stride
@@ -49,6 +59,7 @@
 @interface MPPCGImageUtils : NSObject

 + (std::unique_ptr<ImageFrame>)imageFrameFromCGImage:(CGImageRef)cgImage error:(NSError **)error;
++ (CGImageRef)cgImageFromImageFrame:(ImageFrame &)imageFrame error:(NSError **)error;

 @end
@@ -60,42 +71,44 @@
 @implementation MPPPixelDataUtils : NSObject

-+ (uint8_t *)rgbPixelDataFromPixelData:(uint8_t *)pixelData
++ (std::unique_ptr<ImageFrame>)imageFrameFromPixelData:(uint8_t *)pixelData
                              withWidth:(size_t)width
                                 height:(size_t)height
                                 stride:(size_t)stride
                      pixelBufferFormat:(OSType)pixelBufferFormatType
                                  error:(NSError **)error {
-  NSInteger destinationChannelCount = 3;
+  NSInteger destinationChannelCount = 4;
   size_t destinationBytesPerRow = width * destinationChannelCount;

-  uint8_t *destPixelBufferAddress =
-      (uint8_t *)[MPPCommonUtils mallocWithSize:sizeof(uint8_t) * height * destinationBytesPerRow
-                                          error:error];
-
-  if (!destPixelBufferAddress) {
-    return NULL;
-  }
+  ImageFormat::Format imageFormat = ImageFormat::SRGBA;

   vImage_Buffer srcBuffer = {.data = pixelData,
                              .height = (vImagePixelCount)height,
                              .width = (vImagePixelCount)width,
                              .rowBytes = stride};

-  vImage_Buffer destBuffer = {.data = destPixelBufferAddress,
-                              .height = (vImagePixelCount)height,
-                              .width = (vImagePixelCount)width,
-                              .rowBytes = destinationBytesPerRow};
+  vImage_Buffer destBuffer;

   vImage_Error convertError = kvImageNoError;

+  // Convert the raw pixel data to RGBA format and un-premultiply the alpha from the R, G, B
+  // values since MediaPipe C++ APIs only accept un-premultiplied channels.
   switch (pixelBufferFormatType) {
     case kCVPixelFormatType_32RGBA: {
-      convertError = vImageConvert_RGBA8888toRGB888(&srcBuffer, &destBuffer, kvImageNoFlags);
+      destBuffer = allocatedVImageBuffer((vImagePixelCount)width, (vImagePixelCount)height,
+                                         destinationBytesPerRow);
+      convertError = vImageUnpremultiplyData_RGBA8888(&srcBuffer, &destBuffer, kvImageNoFlags);
       break;
     }
     case kCVPixelFormatType_32BGRA: {
-      convertError = vImageConvert_BGRA8888toRGB888(&srcBuffer, &destBuffer, kvImageNoFlags);
+      const uint8_t permute_map[4] = {2, 1, 0, 3};
+      destBuffer = allocatedVImageBuffer((vImagePixelCount)width, (vImagePixelCount)height,
+                                         destinationBytesPerRow);
+      convertError =
+          vImagePermuteChannels_ARGB8888(&srcBuffer, &destBuffer, permute_map, kvImageNoFlags);
+      if (convertError == kvImageNoError) {
+        convertError = vImageUnpremultiplyData_RGBA8888(&destBuffer, &destBuffer, kvImageNoFlags);
+      }
       break;
     }
     default: {
@@ -103,9 +116,7 @@
                                 withCode:MPPTasksErrorCodeInvalidArgumentError
                              description:@"Invalid source pixel buffer format. Expecting one of "
                                          @"kCVPixelFormatType_32RGBA, kCVPixelFormatType_32BGRA"];
-      free(destPixelBufferAddress);
-      return NULL;
+      return nullptr;
     }
   }
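
The BGRA path above chains two Accelerate calls: a channel permute into RGBA order followed by an in-place un-premultiply. The standalone sketch below illustrates that same two-step conversion outside the diff; the helper name is hypothetical and buffer allocation is left to the caller.

    #import <Accelerate/Accelerate.h>

    // Reorders a BGRA8888 buffer into RGBA8888 and strips the premultiplied alpha,
    // mirroring the kCVPixelFormatType_32BGRA case handled above.
    static vImage_Error ConvertBGRAToUnpremultipliedRGBA(const vImage_Buffer *src,
                                                         const vImage_Buffer *dest) {
      // Destination channel i takes source channel permuteMap[i]: B,G,R,A -> R,G,B,A.
      const uint8_t permuteMap[4] = {2, 1, 0, 3};
      vImage_Error status = vImagePermuteChannels_ARGB8888(src, dest, permuteMap, kvImageNoFlags);
      if (status != kvImageNoError) {
        return status;
      }
      // Un-premultiply in place; MediaPipe's C++ APIs expect un-premultiplied RGBA.
      return vImageUnpremultiplyData_RGBA8888(dest, dest, kvImageNoFlags);
    }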
@@ -113,58 +124,35 @@
     [MPPCommonUtils createCustomError:error
                              withCode:MPPTasksErrorCodeInternalError
                           description:@"Image format conversion failed."];
-    free(destPixelBufferAddress);
-    return NULL;
+    return nullptr;
   }

-  return destPixelBufferAddress;
+  // Uses default deleter
+  return absl::make_unique<ImageFrame>(imageFormat, width, height, destinationBytesPerRow,
+                                       static_cast<uint8 *>(destBuffer.data));
 }

 @end

 @implementation MPPCVPixelBufferUtils

-+ (std::unique_ptr<ImageFrame>)rgbImageFrameFromCVPixelBuffer:(CVPixelBufferRef)pixelBuffer
-                                                        error:(NSError **)error {
-  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
-
-  size_t width = CVPixelBufferGetWidth(pixelBuffer);
-  size_t height = CVPixelBufferGetHeight(pixelBuffer);
-  size_t destinationChannelCount = 3;
-  size_t destinationStride = destinationChannelCount * width;
-
-  uint8_t *rgbPixelData = [MPPPixelDataUtils
-      rgbPixelDataFromPixelData:(uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer)
-                      withWidth:CVPixelBufferGetWidth(pixelBuffer)
-                         height:CVPixelBufferGetHeight(pixelBuffer)
-                         stride:CVPixelBufferGetBytesPerRow(pixelBuffer)
-              pixelBufferFormat:CVPixelBufferGetPixelFormatType(pixelBuffer)
-                          error:error];
-
-  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
-
-  if (!rgbPixelData) {
-    return nullptr;
-  }
-
-  std::unique_ptr<ImageFrame> imageFrame =
-      absl::make_unique<ImageFrame>(::mediapipe::ImageFormat::SRGB, width, height,
-                                    destinationStride, static_cast<uint8 *>(rgbPixelData),
-                                    /*deleter=*/free);
-
-  return imageFrame;
-}
-
 + (std::unique_ptr<ImageFrame>)imageFrameFromCVPixelBuffer:(CVPixelBufferRef)pixelBuffer
                                                      error:(NSError **)error {
   OSType pixelBufferFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);

+  std::unique_ptr<ImageFrame> imageFrame = nullptr;
+
   switch (pixelBufferFormat) {
     case kCVPixelFormatType_32RGBA:
     case kCVPixelFormatType_32BGRA: {
-      return [MPPCVPixelBufferUtils rgbImageFrameFromCVPixelBuffer:pixelBuffer error:error];
+      CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+      imageFrame = [MPPPixelDataUtils
+          imageFrameFromPixelData:(uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer)
+                        withWidth:CVPixelBufferGetWidth(pixelBuffer)
+                           height:CVPixelBufferGetHeight(pixelBuffer)
+                           stride:CVPixelBufferGetBytesPerRow(pixelBuffer)
+                pixelBufferFormat:pixelBufferFormat
+                            error:error];
+      CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+      break;
     }
     default: {
       [MPPCommonUtils createCustomError:error
@@ -175,13 +163,19 @@
     }
   }

-  return nullptr;
+  return imageFrame;
 }

 @end

 @implementation MPPCGImageUtils

+namespace {
+static void FreeDataProviderReleaseCallback(void *info, const void *data, size_t size) {
+  free(info);
+}
+}  // namespace
+
 + (std::unique_ptr<ImageFrame>)imageFrameFromCGImage:(CGImageRef)cgImage error:(NSError **)error {
   size_t width = CGImageGetWidth(cgImage);
   size_t height = CGImageGetHeight(cgImage);
@@ -195,15 +189,20 @@
   UInt8 *pixelDataToReturn = NULL;
+  std::unique_ptr<ImageFrame> imageFrame = nullptr;

   CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

   // iOS infers bytesPerRow if it is set to 0.
   // See https://developer.apple.com/documentation/coregraphics/1455939-cgbitmapcontextcreate
   // But for segmentation test image, this was not the case.
   // Hence setting it to the value of channelCount*width.
-  // kCGImageAlphaNoneSkipLast specifies that Alpha will always be next to B.
+  // kCGImageAlphaPremultipliedLast specifies that Alpha will always be next to B and the R, G, B
+  // values will be premultiplied with alpha. Images with alpha != 255 are stored with the R, G, B
+  // values premultiplied with alpha by iOS. Hence `kCGImageAlphaPremultipliedLast` ensures all
+  // kinds of images (alpha from 0 to 255) are correctly accounted for by iOS.
   // kCGBitmapByteOrder32Big specifies that R will be stored before B.
   // In combination they signify a pixelFormat of kCVPixelFormatType32RGBA.
-  CGBitmapInfo bitMapinfoFor32RGBA = kCGImageAlphaNoneSkipLast | kCGBitmapByteOrder32Big;
+  CGBitmapInfo bitMapinfoFor32RGBA = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
   CGContextRef context = CGBitmapContextCreate(nil, width, height, bitsPerComponent, bytesPerRow,
                                                colorSpace, bitMapinfoFor32RGBA);
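
For reference, the sketch below shows the same drawing configuration in isolation: a device-RGB bitmap context created with kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big, so the rendered pixels come out as premultiplied, big-endian RGBA8888 (the kCVPixelFormatType_32RGBA layout). It is an illustration only; the helper name and the copy-out step are assumptions, not part of this commit.

    #import <CoreGraphics/CoreGraphics.h>
    #include <stdlib.h>
    #include <string.h>

    // Draws `cgImage` into a premultiplied RGBA8888 bitmap context and returns a malloc'd copy of
    // the rendered pixels (caller frees). Returns NULL on failure.
    static void *CopyPremultipliedRGBAPixels(CGImageRef cgImage, size_t *outBytesPerRow) {
      size_t width = CGImageGetWidth(cgImage);
      size_t height = CGImageGetHeight(cgImage);
      size_t bytesPerRow = width * 4;  // 4 channels x 8 bits per component.

      CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
      CGBitmapInfo bitmapInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
      CGContextRef context =
          CGBitmapContextCreate(NULL, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
      CGColorSpaceRelease(colorSpace);
      if (!context) {
        return NULL;
      }

      CGContextDrawImage(context, CGRectMake(0, 0, width, height), cgImage);

      // Copy the backing store out before the context is released.
      void *pixels = malloc(bytesPerRow * height);
      if (pixels) {
        memcpy(pixels, CGBitmapContextGetData(context), bytesPerRow * height);
        *outBytesPerRow = bytesPerRow;
      }
      CGContextRelease(context);
      return pixels;
    }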
@@ -214,7 +213,7 @@
   if (srcData) {
     // We have drawn the image as an RGBA image with 8 bitsPerComponent and hence can safely input
     // a pixel format of type kCVPixelFormatType_32RGBA for conversion by vImage.
-    pixelDataToReturn = [MPPPixelDataUtils rgbPixelDataFromPixelData:srcData
+    imageFrame = [MPPPixelDataUtils imageFrameFromPixelData:srcData
                                                    withWidth:width
                                                       height:height
                                                       stride:bytesPerRow
@@ -227,16 +226,78 @@
   CGColorSpaceRelease(colorSpace);

-  if (!pixelDataToReturn) {
-    return nullptr;
-  }
-
-  std::unique_ptr<ImageFrame> imageFrame = absl::make_unique<ImageFrame>(
-      mediapipe::ImageFormat::SRGB, (int)width, (int)height, (int)destinationBytesPerRow,
-      static_cast<uint8 *>(pixelDataToReturn),
-      /*deleter=*/free);
-
   return imageFrame;
 }

++ (CGImageRef)cgImageFromImageFrame:(std::shared_ptr<ImageFrame>)imageFrame
+                shouldCopyPixelData:(BOOL)shouldCopyPixelData
+                              error:(NSError **)error {
+  CGBitmapInfo bitmapInfo = kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault;
+
+  ImageFrame *internalImageFrame = imageFrame.get();
+  size_t channelCount = 4;
+
+  switch (internalImageFrame->Format()) {
+    case ImageFormat::SRGBA: {
+      bitmapInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
+      break;
+    }
+    default:
+      [MPPCommonUtils createCustomError:error
+                               withCode:MPPTasksErrorCodeInternalError
+                            description:@"Unsupported Image Format Conversion."];
+      return NULL;
+  }
+
+  size_t bitsPerComponent = 8;
+
+  UInt8 *pixelBufferAddress = NULL;
+
+  vImage_Buffer sourceBuffer = {
+      .data = (void *)internalImageFrame->MutablePixelData(),
+      .width = static_cast<vImagePixelCount>(internalImageFrame->Width()),
+      .height = static_cast<vImagePixelCount>(internalImageFrame->Height()),
+      .rowBytes = static_cast<size_t>(internalImageFrame->WidthStep())};
+
+  vImage_Buffer destBuffer;
+
+  CGDataProviderReleaseDataCallback callback = NULL;
+
+  if (shouldCopyPixelData) {
+    destBuffer = allocatedVImageBuffer(static_cast<vImagePixelCount>(internalImageFrame->Width()),
+                                       static_cast<vImagePixelCount>(internalImageFrame->Height()),
+                                       static_cast<size_t>(internalImageFrame->WidthStep()));
+    callback = FreeDataProviderReleaseCallback;
+  } else {
+    destBuffer = sourceBuffer;
+  }
+
+  // Pre-multiply the raw pixels from a `mediapipe::Image` before creating a `CGImage` to ensure
+  // that pixels are displayed correctly irrespective of their alpha values.
+  vImage_Error convertError =
+      vImagePremultiplyData_RGBA8888(&sourceBuffer, &destBuffer, kvImageNoFlags);
+
+  if (convertError != kvImageNoError) {
+    [MPPCommonUtils createCustomError:error
+                             withCode:MPPTasksErrorCodeInternalError
+                          description:@"Image format conversion failed."];
+    return NULL;
+  }
+
+  CGDataProviderRef provider = CGDataProviderCreateWithData(
+      destBuffer.data, destBuffer.data,
+      internalImageFrame->WidthStep() * internalImageFrame->Height(), callback);
+  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+
+  CGImageRef cgImageRef =
+      CGImageCreate(internalImageFrame->Width(), internalImageFrame->Height(), bitsPerComponent,
+                    bitsPerComponent * channelCount, internalImageFrame->WidthStep(), colorSpace,
+                    bitmapInfo, provider, NULL, YES, kCGRenderingIntentDefault);
+
+  CGDataProviderRelease(provider);
+  CGColorSpaceRelease(colorSpace);
+
+  return cgImageRef;
+}

 @end
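
The `shouldCopyPixelData` branch above hands the copied buffer to the `CGImage` through a `CGDataProvider` release callback, so the buffer is freed only when the image no longer needs it. Below is a minimal, self-contained sketch of that ownership pattern; the helper names are hypothetical and the copy uses malloc/free.

    #import <CoreGraphics/CoreGraphics.h>
    #include <stdlib.h>

    // Invoked by Core Graphics once nothing references the provider any more; `info` is the
    // buffer pointer passed as the first argument to CGDataProviderCreateWithData below.
    static void ReleasePixelBuffer(void *info, const void *data, size_t size) { free(info); }

    // Creates a data provider that owns `pixels`: the buffer stays alive exactly as long as the
    // CGImages built on top of it need it, after which ReleasePixelBuffer frees it.
    static CGDataProviderRef MakeOwningProvider(void *pixels, size_t sizeInBytes) {
      return CGDataProviderCreateWithData(pixels, pixels, sizeInBytes, ReleasePixelBuffer);
    }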
@@ -277,6 +338,26 @@
 @implementation MPPImage (Utils)

+- (nullable instancetype)initWithCppImage:(mediapipe::Image &)image
+           cloningPropertiesOfSourceImage:(MPPImage *)sourceImage
+                      shouldCopyPixelData:(BOOL)shouldCopyPixelData
+                                    error:(NSError **)error {
+  switch (sourceImage.imageSourceType) {
+    case MPPImageSourceTypeImage: {
+      CGImageRef cgImageRef = [MPPCGImageUtils cgImageFromImageFrame:image.GetImageFrameSharedPtr()
+                                                 shouldCopyPixelData:shouldCopyPixelData
+                                                               error:error];
+      UIImage *image = [UIImage imageWithCGImage:cgImageRef];
+      CGImageRelease(cgImageRef);
+      return [[MPPImage alloc] initWithUIImage:image orientation:sourceImage.orientation error:nil];
+    }
+    default:
+      // TODO: Implement other source types.
+      return nil;
+  }
+}
+
 - (std::unique_ptr<ImageFrame>)imageFrameWithError:(NSError **)error {
   switch (self.imageSourceType) {
     case MPPImageSourceTypeSampleBuffer: {