Updated formatting

Prianka Liz Kariat 2023-02-16 01:33:33 +05:30
parent 42e35503d9
commit ae05c78443
5 changed files with 31 additions and 33 deletions

@@ -1,4 +1,4 @@
// Copyright 2019 The MediaPipe Authors.
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,19 +19,18 @@
#include "mediapipe/framework/formats/image.h"
namespace {
using ::mediapipe::Image;
using ::mediapipe::MakePacket;
using ::mediapipe::Packet;
using ::mediapipe::Image;
} // namespace
struct freeDeleter {
void operator()(void* ptr) { free(ptr); }
void operator()(void *ptr) { free(ptr); }
}
@implementation MPPVisionPacketCreator
+ (Packet)createWithMPPImage:(MPPImage *)image error:(NSError **)error {
std::unique_ptr<ImageFrame> imageFrame = [image imageFrameWithError:error];
if (!imageFrame) {

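The hunk above cuts off inside `createWithMPPImage:error:`. Purely as an illustration of where the `Image`, `MakePacket`, and `Packet` using-declarations come into play, here is a hedged sketch of how such a method typically finishes; the early-return value and error handling are assumptions, not this file's actual code:

+ (Packet)createWithMPPImage:(MPPImage *)image error:(NSError **)error {
  std::unique_ptr<ImageFrame> imageFrame = [image imageFrameWithError:error];
  if (!imageFrame) {
    // Conversion failed; `error`, if provided, was populated by the category method.
    // Returning an empty packet here is an assumption for illustration.
    return Packet();
  }
  // Wrap the frame in a mediapipe::Image and move it into a packet without copying pixels.
  return MakePacket<Image>(std::move(imageFrame));
}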
@@ -14,8 +14,8 @@
#import <Foundation/Foundation.h>
#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
#include "mediapipe/framework/formats/image_frame.h"
#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
NS_ASSUME_NONNULL_BEGIN
@@ -25,8 +25,9 @@ NS_ASSUME_NONNULL_BEGIN
@interface MPPImage (Utils)
/**
* Converts the `MPPImage` into a `mediapipe::ImageFrame`.
* Irrespective of whether the underlying buffer is grayscale, RGB, RGBA, BGRA etc., the MPPImage is converted to an RGB format. In case of grayscale images, the mono channel is duplicated
* in the R, G, B channels.
* Irrespective of whether the underlying buffer is grayscale, RGB, RGBA, BGRA etc., the MPPImage is
* converted to an RGB format. In case of grayscale images, the mono channel is duplicated in the R,
* G, B channels.
*
* @param error Pointer to the memory location where errors if any should be
* saved. If @c NULL, no error will be saved.

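Outside the diff, a short usage sketch of the category declared above: `initWithUIImage:error:` is assumed from `MPPImage`'s public initializers, while `imageFrameWithError:` is the method invoked in the packet creator earlier in this commit.

// Hypothetical caller of the MPPImage (Utils) category.
NSError *error = nil;
UIImage *uiImage = [UIImage imageNamed:@"sample_input"];

// Assumed initializer on MPPImage; substitute whichever initializer your image source needs.
MPPImage *image = [[MPPImage alloc] initWithUIImage:uiImage error:&error];

// Grayscale, RGB, RGBA, and BGRA buffers all come back as an RGB ImageFrame,
// with grayscale inputs duplicated into the R, G, and B channels.
std::unique_ptr<mediapipe::ImageFrame> imageFrame = [image imageFrameWithError:&error];
if (!imageFrame) {
  NSLog(@"Could not convert MPPImage: %@", error);
}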
@@ -25,7 +25,7 @@
#include "mediapipe/framework/formats/image_format.pb.h"
namespace {
using ::mediapipe::ImageFrame;
using ::mediapipe::ImageFrame;
}
@interface MPPPixelDataUtils : NSObject
@@ -41,7 +41,8 @@ namespace {
@interface MPPCVPixelBufferUtils : NSObject
+ (std::unique_ptr<ImageFrame>)imageFrameFromCVPixelBuffer:(CVPixelBufferRef)pixelBuffer error:(NSError **)error;
+ (std::unique_ptr<ImageFrame>)imageFrameFromCVPixelBuffer:(CVPixelBufferRef)pixelBuffer
error:(NSError **)error;
@end
@@ -124,15 +125,14 @@ namespace {
@implementation MPPCVPixelBufferUtils
+ (std::unique_ptr<ImageFrame>)rgbImageFrameFromCVPixelBuffer:(CVPixelBufferRef)pixelBuffer error:(NSError **)error {
+ (std::unique_ptr<ImageFrame>)rgbImageFrameFromCVPixelBuffer:(CVPixelBufferRef)pixelBuffer
error:(NSError **)error {
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
uint8_t *rgbPixelData = [MPPPixelDataUtils
rgbPixelDataFromPixelData:(uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer)
withWidth:CVPixelBufferGetWidth(pixelBuffer)
@@ -147,9 +147,9 @@ namespace {
return nullptr;
}
std::unique_ptr<ImageFrame> imageFrame = absl::make_unique<ImageFrame>(
::mediapipe::ImageFormat::SRGB, /*width=*/width, /*height=*/height, stride,
static_cast<uint8*>(rgbPixelData),
std::unique_ptr<ImageFrame> imageFrame =
absl::make_unique<ImageFrame>(::mediapipe::ImageFormat::SRGB, /*width=*/width,
/*height=*/height, stride, static_cast<uint8 *>(rgbPixelData),
/*deleter=*/free);
return imageFrame;
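As context for the `/*deleter=*/free` argument: `ImageFrame` adopts the heap-allocated RGB buffer and releases it with the supplied deleter when the frame is destroyed. A minimal standalone sketch of the same ownership pattern, with purely illustrative dimensions:

// Illustrative only: hand a malloc'd buffer to ImageFrame and let it own the memory.
int width = 64;
int height = 64;
int widthStep = width * 3;  // Tightly packed SRGB rows; real code uses the source stride.
uint8_t *pixelData = static_cast<uint8_t *>(malloc(widthStep * height));

auto frame = absl::make_unique<mediapipe::ImageFrame>(
    mediapipe::ImageFormat::SRGB, width, height, widthStep, pixelData,
    /*deleter=*/free);
// No manual free(pixelData) here; the deleter runs when `frame` is destroyed.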
@@ -224,7 +224,7 @@ namespace {
std::unique_ptr<ImageFrame> imageFrame = absl::make_unique<ImageFrame>(
mediapipe::ImageFormat::SRGB, /*width=*/(int)width, /*height=*/(int)height, (int)bytesPerRow,
static_cast<uint8*>(pixelDataToReturn),
static_cast<uint8 *>(pixelDataToReturn),
/*deleter=*/free);
return imageFrame;
@@ -235,10 +235,8 @@ namespace {
@implementation UIImage (ImageFrameUtils)
- (std::unique_ptr<ImageFrame>)imageFrameFromCIImageWithError:(NSError **)error {
if (self.CIImage.pixelBuffer) {
return [MPPCVPixelBufferUtils imageFrameFromCVPixelBuffer:self.CIImage.pixelBuffer
error:error];
return [MPPCVPixelBufferUtils imageFrameFromCVPixelBuffer:self.CIImage.pixelBuffer error:error];
} else if (self.CIImage.CGImage) {
return [MPPCGImageUtils imageFrameFromCGImage:self.CIImage.CGImage error:error];
@@ -278,8 +276,7 @@ namespace {
switch (self.imageSourceType) {
case MPPImageSourceTypeSampleBuffer: {
CVPixelBufferRef sampleImagePixelBuffer = CMSampleBufferGetImageBuffer(self.sampleBuffer);
return [MPPCVPixelBufferUtils imageFrameFromCVPixelBuffer:sampleImagePixelBuffer
error:error];
return [MPPCVPixelBufferUtils imageFrameFromCVPixelBuffer:sampleImagePixelBuffer error:error];
}
case MPPImageSourceTypePixelBuffer: {
return [MPPCVPixelBufferUtils imageFrameFromCVPixelBuffer:self.pixelBuffer error:error];

@@ -29,7 +29,8 @@ NS_SWIFT_NAME(ImageClassifierOptions)
@property(nonatomic) MPPRunningMode runningMode;
/**
* The user-defined result callback for processing live stream data. The result callback should only be specified when the running mode is set to the live stream mode.
* The user-defined result callback for processing live stream data. The result callback should only
* be specified when the running mode is set to the live stream mode.
*/
@property(nonatomic, copy) void (^completion)(MPPImageClassifierResult *result, NSError *error);
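To illustrate how this property is meant to be used (not part of the diff): the callback is set alongside the live stream running mode. `MPPRunningModeLiveStream` is assumed here to be the corresponding `MPPRunningMode` value.

MPPImageClassifierOptions *options = [[MPPImageClassifierOptions alloc] init];

// The completion block is only valid together with the live stream running mode
// (MPPRunningModeLiveStream is assumed).
options.runningMode = MPPRunningModeLiveStream;
options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
  if (error) {
    NSLog(@"Classification failed: %@", error);
    return;
  }
  // Handle the asynchronous result for the current live stream frame.
  NSLog(@"Result: %@", result);
};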