Merge branch 'master' into ios-ml-image-utils
commit b940a19462
@@ -12,8 +12,6 @@ nav_order: 1
{:toc}
---

## C++ Graph Builder

C++ graph builder is a powerful tool for:

* Building complex graphs
@@ -25,7 +23,7 @@ C++ graph builder is a powerful tool for:
* Supporting optional graph inputs/outputs
* Customizing graphs per platform

-### Basic Usage
+## Basic Usage

Let's see how C++ graph builder can be used for a simple graph:

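The example itself sits outside this hunk. For orientation, a minimal sketch of what a simple graph built with this API can look like (the calculator name and packet types below are placeholders, not taken from the elided example):

```c++
CalculatorGraphConfig BuildGraph() {
  Graph graph;

  // Graph inputs.
  Stream<ImageFrame> input = graph.In(0).SetName("input").Cast<ImageFrame>();

  // Add a node and wire the input through it (calculator name is a placeholder).
  auto& node = graph.AddNode("SomeImageCalculator");
  input.ConnectTo(node.In(0));
  Stream<ImageFrame> output = node.Out(0).Cast<ImageFrame>();

  // Graph outputs.
  output.SetName("output").ConnectTo(graph.Out(0));

  return graph.GetConfig();
}
```
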
@@ -95,9 +93,9 @@ Short summary:
unleashing graph builder capabilities and improving your graphs'
readability.

-### Advanced Usage
+## Advanced Usage

-#### Utility Functions
+### Utility Functions

Let's extract inference construction code into a dedicated utility function to
help with readability and code reuse:

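The extracted function itself is elided between hunks; the next hunk's context shows it lives behind an `:inference_calculator` dep and an `inference.h` header. A rough sketch of such a `RunInference` helper, with tag names and signature assumed for illustration:

```c++
Stream<std::vector<Tensor>> RunInference(Stream<std::vector<Tensor>> tensors,
                                         SidePacket<TfLiteModelPtr> model,
                                         Graph& graph) {
  // Construct the inference node once, here, instead of at every call site.
  auto& inference_node = graph.AddNode("InferenceCalculator");
  tensors.ConnectTo(inference_node.In("TENSORS"));
  model.ConnectTo(inference_node.SideIn("MODEL"));
  return inference_node.Out("TENSORS").Cast<std::vector<Tensor>>();
}
```
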
@@ -162,7 +160,7 @@ graphs construction code and helps automatically pull in calculator dependencies
(e.g. no need to manually add the `:inference_calculator` dep, just let your IDE
include `inference.h` and the build cleaner pull in the corresponding dependency).

-#### Utility Classes
+### Utility Classes

And surely, it's not only about functions; in some cases it's beneficial to
introduce utility classes which can help make your graph construction code

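The class body itself is elided between hunks; the tip in the next hunk refers to it as `PassThroughNodeBuilder`. A rough sketch of the general shape of such a utility class (member and calculator names assumed for illustration):

```c++
class PassThroughNodeBuilder {
 public:
  explicit PassThroughNodeBuilder(Graph& graph)
      : node_(graph.AddNode("PassThroughCalculator")) {}

  // Routes `stream` through the next free input/output pair of the node.
  template <typename T>
  Stream<T> PassThrough(Stream<T> stream) {
    stream.ConnectTo(node_.In(index_));
    return node_.Out(index_++).Cast<T>();
  }

 private:
  GenericNode& node_;
  int index_ = 0;
};
```
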
@@ -277,3 +275,69 @@ Tip: the same as for the `RunInference` function, extracting
`PassThroughNodeBuilder` and similar utility classes into dedicated modules
enables reuse in graph construction code and helps to automatically pull in the
corresponding calculator dependencies.

## Dos and Don'ts

### Define graph inputs at the very beginning if possible

```c++ {.bad}
Stream<D> RunSomething(Stream<A> a, Stream<B> b, Graph& graph) {
  Stream<C> c = graph.In(2).SetName("c").Cast<C>();  // Bad.
  // ...
}

CalculatorGraphConfig BuildGraph() {
  Graph graph;

  Stream<A> a = graph.In(0).SetName("a").Cast<A>();
  // 10/100/N lines of code.
  Stream<B> b = graph.In(1).SetName("b").Cast<B>();  // Bad.
  Stream<D> d = RunSomething(a, b, graph);
  // ...
}

```

In the above code:

* It can be hard to guess how many inputs you have in the graph.
* It can be error-prone overall and hard to maintain in the future (e.g. is it
  the correct index? the correct name? what if some inputs are removed or made
  optional? etc.).

Instead, simply define your graph inputs at the very beginning of your graph
builder:

```c++ {.good}
Stream<D> RunSomething(Stream<A> a, Stream<B> b, Stream<C> c, Graph& graph) {
  // ...
}

CalculatorGraphConfig BuildGraph() {
  Graph graph;

  Stream<A> a = graph.In(0).SetName("a").Cast<A>();
  Stream<B> b = graph.In(1).SetName("b").Cast<B>();
  Stream<C> c = graph.In(2).SetName("c").Cast<C>();

  // 10/100/N lines of code.
  Stream<D> d = RunSomething(a, b, c, graph);
  // ...
}
```

And if you have an input stream or side packet that is not always defined -
simply use `std::optional` and put it at the very beginning as well:

```c++ {.good}
std::optional<Stream<A>> a;
if (needs_a) {
  a = graph.In(0).SetName("a").Cast<A>();
}
```

Note: of course, there can be exceptions - for example, there can be a use case
where calling `RunSomething1(..., graph)`, ..., `RunSomethingN(..., graph)` is
**intended to add new inputs**, so afterwards you can iterate over them and feed
only the added inputs into the graph. However, in any case, try to make it easy
for readers to find out what graph inputs the graph has or may have.

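For that exceptional case, iterating over the added inputs might look roughly like this (a sketch; `input_stream()` is the standard repeated field on `CalculatorGraphConfig`, and the feeding step is only indicated):

```c++
CalculatorGraphConfig config = BuildGraph();
// Feed only the input streams that the RunSomething1..N calls actually added.
for (const std::string& input_name : config.input_stream()) {
  // e.g. graph.AddPacketToInputStream(input_name, ...) on the running graph.
}
```
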
@@ -489,9 +489,12 @@ cc_test(

cc_library(
    name = "frame_buffer",
    srcs = ["frame_buffer.cc"],
    hdrs = ["frame_buffer.h"],
    deps = [
        "//mediapipe/framework/port:integral_types",
        "@com_google_absl//absl/log:check",
        "@com_google_absl//absl/status",
        "@com_google_absl//absl/status:statusor",
    ],
)

mediapipe/framework/formats/frame_buffer.cc (new file, 176 lines)
@@ -0,0 +1,176 @@
/* Copyright 2023 The MediaPipe Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "mediapipe/framework/formats/frame_buffer.h"

#include "absl/status/status.h"
#include "absl/status/statusor.h"

namespace mediapipe {

namespace {

// Returns whether the input `format` is a supported YUV format.
bool IsSupportedYuvFormat(FrameBuffer::Format format) {
  return format == FrameBuffer::Format::kNV21 ||
         format == FrameBuffer::Format::kNV12 ||
         format == FrameBuffer::Format::kYV12 ||
         format == FrameBuffer::Format::kYV21;
}

// Returns supported 1-plane FrameBuffer in YuvData structure.
absl::StatusOr<FrameBuffer::YuvData> GetYuvDataFromOnePlaneFrameBuffer(
    const FrameBuffer& source) {
  if (!IsSupportedYuvFormat(source.format())) {
    return absl::InvalidArgumentError(
        "The source FrameBuffer format is not part of YUV420 family.");
  }

  FrameBuffer::YuvData result;
  const int y_buffer_size =
      source.plane(0).stride().row_stride_bytes * source.dimension().height;
  const int uv_buffer_size =
      ((source.plane(0).stride().row_stride_bytes + 1) / 2) *
      ((source.dimension().height + 1) / 2);
  result.y_buffer = source.plane(0).buffer();
  result.y_row_stride = source.plane(0).stride().row_stride_bytes;
  result.uv_row_stride = result.y_row_stride;

  if (source.format() == FrameBuffer::Format::kNV21) {
    result.v_buffer = result.y_buffer + y_buffer_size;
    result.u_buffer = result.v_buffer + 1;
    result.uv_pixel_stride = 2;
    // If y_row_stride equals the frame width and is an odd value,
    // uv_row_stride = y_row_stride + 1, otherwise uv_row_stride = y_row_stride.
    if (result.y_row_stride == source.dimension().width &&
        result.y_row_stride % 2 == 1) {
      result.uv_row_stride = (result.y_row_stride + 1) / 2 * 2;
    }
  } else if (source.format() == FrameBuffer::Format::kNV12) {
    result.u_buffer = result.y_buffer + y_buffer_size;
    result.v_buffer = result.u_buffer + 1;
    result.uv_pixel_stride = 2;
    // If y_row_stride equals the frame width and is an odd value,
    // uv_row_stride = y_row_stride + 1, otherwise uv_row_stride = y_row_stride.
    if (result.y_row_stride == source.dimension().width &&
        result.y_row_stride % 2 == 1) {
      result.uv_row_stride = (result.y_row_stride + 1) / 2 * 2;
    }
  } else if (source.format() == FrameBuffer::Format::kYV21) {
    result.u_buffer = result.y_buffer + y_buffer_size;
    result.v_buffer = result.u_buffer + uv_buffer_size;
    result.uv_pixel_stride = 1;
    result.uv_row_stride = (result.y_row_stride + 1) / 2;
  } else if (source.format() == FrameBuffer::Format::kYV12) {
    result.v_buffer = result.y_buffer + y_buffer_size;
    result.u_buffer = result.v_buffer + uv_buffer_size;
    result.uv_pixel_stride = 1;
    result.uv_row_stride = (result.y_row_stride + 1) / 2;
  }
  return result;
}

// Returns supported 2-plane FrameBuffer in YuvData structure.
absl::StatusOr<FrameBuffer::YuvData> GetYuvDataFromTwoPlaneFrameBuffer(
    const FrameBuffer& source) {
  if (source.format() != FrameBuffer::Format::kNV12 &&
      source.format() != FrameBuffer::Format::kNV21) {
    return absl::InvalidArgumentError("Unsupported YUV planar format.");
  }

  FrameBuffer::YuvData result;
  // Y plane.
  result.y_buffer = source.plane(0).buffer();
  // All plane strides.
  result.y_row_stride = source.plane(0).stride().row_stride_bytes;
  result.uv_row_stride = source.plane(1).stride().row_stride_bytes;
  result.uv_pixel_stride = 2;

  if (source.format() == FrameBuffer::Format::kNV12) {
    // Y and UV interleaved format.
    result.u_buffer = source.plane(1).buffer();
    result.v_buffer = result.u_buffer + 1;
  } else {
    // Y and VU interleaved format.
    result.v_buffer = source.plane(1).buffer();
    result.u_buffer = result.v_buffer + 1;
  }
  return result;
}

// Returns supported 3-plane FrameBuffer in YuvData structure. Note that NV21
// and NV12 are included in the supported YUV formats. Technically, NV21 and
// NV12 should not be described by the 3-plane format. Historically, NV21 is
// used loosely such that it can also be used to describe the YV21 format. For
// backwards compatibility, FrameBuffer supports NV21/NV12 with the 3-plane
// format, but such usage is discouraged.
absl::StatusOr<FrameBuffer::YuvData> GetYuvDataFromThreePlaneFrameBuffer(
    const FrameBuffer& source) {
  if (!IsSupportedYuvFormat(source.format())) {
    return absl::InvalidArgumentError(
        "The source FrameBuffer format is not part of YUV420 family.");
  }

  if (source.plane(1).stride().row_stride_bytes !=
          source.plane(2).stride().row_stride_bytes ||
      source.plane(1).stride().pixel_stride_bytes !=
          source.plane(2).stride().pixel_stride_bytes) {
    return absl::InternalError("Unsupported YUV planar format.");
  }
  FrameBuffer::YuvData result;
  if (source.format() == FrameBuffer::Format::kNV21 ||
      source.format() == FrameBuffer::Format::kYV12) {
    // Y followed by VU order. The VU chroma planes can be interleaved or
    // planar.
    result.y_buffer = source.plane(0).buffer();
    result.v_buffer = source.plane(1).buffer();
    result.u_buffer = source.plane(2).buffer();
    result.y_row_stride = source.plane(0).stride().row_stride_bytes;
    result.uv_row_stride = source.plane(1).stride().row_stride_bytes;
    result.uv_pixel_stride = source.plane(1).stride().pixel_stride_bytes;
  } else {
    // Y followed by UV order. The UV chroma planes can be interleaved or
    // planar.
    result.y_buffer = source.plane(0).buffer();
    result.u_buffer = source.plane(1).buffer();
    result.v_buffer = source.plane(2).buffer();
    result.y_row_stride = source.plane(0).stride().row_stride_bytes;
    result.uv_row_stride = source.plane(1).stride().row_stride_bytes;
    result.uv_pixel_stride = source.plane(1).stride().pixel_stride_bytes;
  }
  return result;
}

}  // namespace

absl::StatusOr<FrameBuffer::YuvData> FrameBuffer::GetYuvDataFromFrameBuffer(
    const FrameBuffer& source) {
  if (!IsSupportedYuvFormat(source.format())) {
    return absl::InvalidArgumentError(
        "The source FrameBuffer format is not part of YUV420 family.");
  }

  if (source.plane_count() == 1) {
    return GetYuvDataFromOnePlaneFrameBuffer(source);
  } else if (source.plane_count() == 2) {
    return GetYuvDataFromTwoPlaneFrameBuffer(source);
  } else if (source.plane_count() == 3) {
    return GetYuvDataFromThreePlaneFrameBuffer(source);
  }
  return absl::InvalidArgumentError(
      "The source FrameBuffer must consist of 1, 2, or 3 planes.");
}

}  // namespace mediapipe
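
A minimal usage sketch for the new `GetYuvDataFromFrameBuffer` entry point (the `FrameBuffer` construction itself is assumed to happen elsewhere and is not part of this diff):

```c++
// Assumes `frame_buffer` already points at a YUV420-family FrameBuffer
// (NV12/NV21/YV12/YV21) built elsewhere.
absl::StatusOr<FrameBuffer::YuvData> yuv =
    FrameBuffer::GetYuvDataFromFrameBuffer(*frame_buffer);
if (yuv.ok()) {
  // Walk the luma plane row by row using the reported stride.
  for (int row = 0; row < frame_buffer->dimension().height; ++row) {
    const uint8* y_row = yuv->y_buffer + row * yuv->y_row_stride;
    // ... consume dimension().width Y samples from `y_row` ...
  }
}
```
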
mediapipe/framework/formats/frame_buffer.h
@@ -1,4 +1,4 @@
-/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+/* Copyright 2023 The MediaPipe Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -19,6 +19,7 @@ limitations under the License.
#include <vector>

#include "absl/log/check.h"
#include "absl/status/statusor.h"
#include "mediapipe/framework/port/integral_types.h"

namespace mediapipe {
@@ -118,6 +119,20 @@ class FrameBuffer {
    int Size() const { return width * height; }
  };

  // YUV data structure.
  struct YuvData {
    const uint8* y_buffer;
    const uint8* u_buffer;
    const uint8* v_buffer;
    // Y buffer row stride in bytes.
    int y_row_stride;
    // U/V buffer row stride in bytes.
    int uv_row_stride;
    // U/V pixel stride in bytes. This is the distance between two consecutive
    // u/v pixel values in a row.
    int uv_pixel_stride;
  };

  // Builds a FrameBuffer object from a row-major backing buffer.
  //
  // The FrameBuffer does not take ownership of the backing buffer. The caller
@@ -150,6 +165,12 @@ class FrameBuffer {
  // Returns FrameBuffer format.
  Format format() const { return format_; }

  // Returns YuvData which contains the Y, U, and V buffers and their
  // stride info from the input `source` FrameBuffer, which is in one of the
  // YUV family formats (e.g. NV12, NV21, YV12, and YV21).
  static absl::StatusOr<YuvData> GetYuvDataFromFrameBuffer(
      const FrameBuffer& source);

 private:
  std::vector<Plane> planes_;
  Dimension dimension_;

@@ -87,6 +87,8 @@ cc_library(
cc_library(
    name = "builtin_task_graphs",
    deps = [
+        "//mediapipe/tasks/cc/audio/audio_classifier:audio_classifier_graph",
+        "//mediapipe/tasks/cc/audio/audio_embedder:audio_embedder_graph",
        "//mediapipe/tasks/cc/vision/gesture_recognizer:gesture_recognizer_graph",
        "//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph",
        "//mediapipe/tasks/cc/vision/image_embedder:image_embedder_graph",
@@ -94,11 +96,8 @@ cc_library(
        "//mediapipe/tasks/cc/vision/object_detector:object_detector_graph",
    ] + select({
        # TODO: Build text_classifier_graph and text_embedder_graph on Windows.
-        # TODO: Build audio_classifier_graph and audio_embedder_graph on Windows.
        "//mediapipe:windows": [],
        "//conditions:default": [
-            "//mediapipe/tasks/cc/audio/audio_classifier:audio_classifier_graph",
-            "//mediapipe/tasks/cc/audio/audio_embedder:audio_embedder_graph",
            "//mediapipe/tasks/cc/text/text_classifier:text_classifier_graph",
            "//mediapipe/tasks/cc/text/text_embedder:text_embedder_graph",
        ],

mediapipe/tasks/ios/test/vision/core/BUILD (new file, 58 lines)
@@ -0,0 +1,58 @@
load(
    "@build_bazel_rules_apple//apple:ios.bzl",
    "ios_unit_test",
)
load(
    "//mediapipe/tasks:ios/ios.bzl",
    "MPP_TASK_MINIMUM_OS_VERSION",
)
load(
    "@org_tensorflow//tensorflow/lite:special_rules.bzl",
    "tflite_ios_lab_runner",
)

package(default_visibility = ["//mediapipe/tasks:internal"])

licenses(["notice"])

# Default tags for filtering iOS targets. Targets are restricted to Apple platforms.
TFL_DEFAULT_TAGS = [
    "apple",
]

# Following sanitizer tests are not supported by iOS test targets.
TFL_DISABLED_SANITIZER_TAGS = [
    "noasan",
    "nomsan",
    "notsan",
]

objc_library(
    name = "MPPImageObjcTestLibrary",
    testonly = 1,
    srcs = ["MPPImageTests.m"],
    data = [
        "//mediapipe/tasks/testdata/vision:test_images",
    ],
    sdk_frameworks = [
        "CoreMedia",
        "CoreVideo",
        "CoreGraphics",
        "UIKit",
        "Accelerate",
    ],
    deps = [
        "//mediapipe/tasks/ios/common:MPPCommon",
        "//mediapipe/tasks/ios/vision/core:MPPImage",
    ],
)

ios_unit_test(
    name = "MPPImageObjcTest",
    minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION,
    runner = tflite_ios_lab_runner("IOS_LATEST"),
    tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS,
    deps = [
        ":MPPImageObjcTestLibrary",
    ],
)
mediapipe/tasks/ios/test/vision/core/MPPImageTests.m (new file, 358 lines)
@@ -0,0 +1,358 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"

#import <Accelerate/Accelerate.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <XCTest/XCTest.h>
#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

static NSString *const kTestImageName = @"burger";
static NSString *const kTestImageType = @"jpg";
static CGFloat kTestImageWidthInPixels = 480.0f;
static CGFloat kTestImageHeightInPixels = 325.0f;
static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";

#define AssertEqualErrors(error, expectedError)              \
  XCTAssertNotNil(error);                                    \
  XCTAssertEqualObjects(error.domain, expectedError.domain); \
  XCTAssertEqual(error.code, expectedError.code);            \
  XCTAssertNotEqual(                                         \
      [error.localizedDescription rangeOfString:expectedError.localizedDescription].location, \
      NSNotFound)

/** Unit tests for `MPPImage`. */
@interface MPPImageTests : XCTestCase

/** Test image. */
@property(nonatomic, nullable) UIImage *image;

@end

@implementation MPPImageTests

#pragma mark - Tests

- (void)setUp {
  [super setUp];
  NSString *imageName = [[NSBundle bundleForClass:[self class]] pathForResource:kTestImageName
                                                                          ofType:kTestImageType];
  self.image = [[UIImage alloc] initWithContentsOfFile:imageName];
}

- (void)tearDown {
  self.image = nil;
  [super tearDown];
}

- (void)assertMPPImage:(nullable MPPImage *)mppImage
         hasSourceType:(MPPImageSourceType)sourceType
        hasOrientation:(UIImageOrientation)expectedOrientation
                 width:(CGFloat)expectedWidth
                height:(CGFloat)expectedHeight {
  XCTAssertNotNil(mppImage);
  XCTAssertEqual(mppImage.imageSourceType, sourceType);
  XCTAssertEqual(mppImage.orientation, expectedOrientation);
  XCTAssertEqualWithAccuracy(mppImage.width, expectedWidth, FLT_EPSILON);
  XCTAssertEqualWithAccuracy(mppImage.height, expectedHeight, FLT_EPSILON);
}

- (void)assertInitFailsWithImage:(nullable MPPImage *)mppImage
                           error:(NSError *)error
                   expectedError:(NSError *)expectedError {
  XCTAssertNil(mppImage);
  XCTAssertNotNil(error);
  AssertEqualErrors(error, expectedError);
}

- (void)testInitWithImageSuceeds {
  MPPImage *mppImage = [[MPPImage alloc] initWithUIImage:self.image error:nil];
  [self assertMPPImage:mppImage
         hasSourceType:MPPImageSourceTypeImage
        hasOrientation:self.image.imageOrientation
                 width:kTestImageWidthInPixels
                height:kTestImageHeightInPixels];
}

- (void)testInitWithImageAndOrientation {
  UIImageOrientation orientation = UIImageOrientationRight;

  MPPImage *mppImage = [[MPPImage alloc] initWithUIImage:self.image
                                             orientation:orientation
                                                   error:nil];
  [self assertMPPImage:mppImage
         hasSourceType:MPPImageSourceTypeImage
        hasOrientation:orientation
                 width:kTestImageWidthInPixels
                height:kTestImageHeightInPixels];
}

- (void)testInitWithImage_nilImage {
  NSError *error;

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnonnull"
  MPPImage *mppImage = [[MPPImage alloc] initWithUIImage:nil error:&error];
#pragma clang diagnostic pop

  [self
      assertInitFailsWithImage:mppImage
                         error:error
                 expectedError:[NSError errorWithDomain:kExpectedErrorDomain
                                                   code:MPPTasksErrorCodeInvalidArgumentError
                                               userInfo:@{
                                                 NSLocalizedDescriptionKey : @"Image cannot be nil."
                                               }]];
}

- (void)testInitWithImageAndOrientation_nilImage {
  NSError *error;

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnonnull"
  MPPImage *mppImage = [[MPPImage alloc] initWithUIImage:nil
                                             orientation:UIImageOrientationRight
                                                   error:&error];
#pragma clang diagnostic pop

  [self
      assertInitFailsWithImage:mppImage
                         error:error
                 expectedError:[NSError errorWithDomain:kExpectedErrorDomain
                                                   code:MPPTasksErrorCodeInvalidArgumentError
                                               userInfo:@{
                                                 NSLocalizedDescriptionKey : @"Image cannot be nil."
                                               }]];
}

- (void)testInitWithSampleBuffer {
  CMSampleBufferRef sampleBuffer = [self sampleBuffer];

  MPPImage *mppImage = [[MPPImage alloc] initWithSampleBuffer:sampleBuffer error:nil];
  [self assertMPPImage:mppImage
         hasSourceType:MPPImageSourceTypeSampleBuffer
        hasOrientation:UIImageOrientationUp
                 width:kTestImageWidthInPixels
                height:kTestImageHeightInPixels];
}

- (void)testInitWithSampleBufferAndOrientation {
  UIImageOrientation orientation = UIImageOrientationRight;
  CMSampleBufferRef sampleBuffer = [self sampleBuffer];

  MPPImage *mppImage = [[MPPImage alloc] initWithSampleBuffer:sampleBuffer
                                                  orientation:orientation
                                                        error:nil];
  [self assertMPPImage:mppImage
         hasSourceType:MPPImageSourceTypeSampleBuffer
        hasOrientation:orientation
                 width:kTestImageWidthInPixels
                height:kTestImageHeightInPixels];
}

- (void)testInitWithSampleBuffer_nilImage {
  NSError *error;

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnonnull"
  MPPImage *mppImage = [[MPPImage alloc] initWithSampleBuffer:nil error:&error];
#pragma clang diagnostic pop

  [self
      assertInitFailsWithImage:mppImage
                         error:error
                 expectedError:
                     [NSError errorWithDomain:kExpectedErrorDomain
                                         code:MPPTasksErrorCodeInvalidArgumentError
                                     userInfo:@{
                                       NSLocalizedDescriptionKey :
                                           @"Sample buffer is not valid. Invoking "
                                           @"CMSampleBufferIsValid(sampleBuffer) must return true."
                                     }]];
}

- (void)testInitWithSampleBufferAndOrientation_nilImage {
  NSError *error;

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnonnull"
  MPPImage *mppImage = [[MPPImage alloc] initWithSampleBuffer:nil
                                                  orientation:UIImageOrientationRight
                                                        error:&error];
#pragma clang diagnostic pop

  [self
      assertInitFailsWithImage:mppImage
                         error:error
                 expectedError:
                     [NSError errorWithDomain:kExpectedErrorDomain
                                         code:MPPTasksErrorCodeInvalidArgumentError
                                     userInfo:@{
                                       NSLocalizedDescriptionKey :
                                           @"Sample buffer is not valid. Invoking "
                                           @"CMSampleBufferIsValid(sampleBuffer) must return true."
                                     }]];
}

- (void)testInitWithPixelBuffer {
  CMSampleBufferRef sampleBuffer = [self sampleBuffer];
  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

  MPPImage *mppImage = [[MPPImage alloc] initWithPixelBuffer:pixelBuffer error:nil];
  [self assertMPPImage:mppImage
         hasSourceType:MPPImageSourceTypePixelBuffer
        hasOrientation:UIImageOrientationUp
                 width:kTestImageWidthInPixels
                height:kTestImageHeightInPixels];
}

- (void)testInitWithPixelBufferAndOrientation {
  UIImageOrientation orientation = UIImageOrientationRight;

  CMSampleBufferRef sampleBuffer = [self sampleBuffer];
  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

  MPPImage *mppImage = [[MPPImage alloc] initWithPixelBuffer:pixelBuffer
                                                 orientation:orientation
                                                       error:nil];
  [self assertMPPImage:mppImage
         hasSourceType:MPPImageSourceTypePixelBuffer
        hasOrientation:orientation
                 width:kTestImageWidthInPixels
                height:kTestImageHeightInPixels];
}

- (void)testInitWithPixelBuffer_nilImage {
  NSError *error;

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnonnull"
  MPPImage *mppImage = [[MPPImage alloc] initWithPixelBuffer:nil error:&error];
#pragma clang diagnostic pop

  [self assertInitFailsWithImage:mppImage
                           error:error
                   expectedError:[NSError errorWithDomain:kExpectedErrorDomain
                                                     code:MPPTasksErrorCodeInvalidArgumentError
                                                 userInfo:@{
                                                   NSLocalizedDescriptionKey :
                                                       @"Pixel Buffer cannot be nil."
                                                 }]];
}

- (void)testInitWithPixelBufferAndOrientation_nilImage {
  NSError *error;

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnonnull"
  MPPImage *mppImage = [[MPPImage alloc] initWithPixelBuffer:nil
                                                 orientation:UIImageOrientationRight
                                                       error:&error];
#pragma clang diagnostic pop

  [self assertInitFailsWithImage:mppImage
                           error:error
                   expectedError:[NSError errorWithDomain:kExpectedErrorDomain
                                                     code:MPPTasksErrorCodeInvalidArgumentError
                                                 userInfo:@{
                                                   NSLocalizedDescriptionKey :
                                                       @"Pixel Buffer cannot be nil."
                                                 }]];
}

#pragma mark - Private

/**
 * Converts the input image in RGBA space into a `CMSampleBuffer`.
 *
 * @return `CMSampleBuffer` converted from the given `UIImage`.
 */
- (CMSampleBufferRef)sampleBuffer {
  // Rotate the image and convert from RGBA to BGRA.
  CGImageRef CGImage = self.image.CGImage;
  size_t width = CGImageGetWidth(CGImage);
  size_t height = CGImageGetHeight(CGImage);
  size_t bpr = CGImageGetBytesPerRow(CGImage);

  CGDataProviderRef provider = CGImageGetDataProvider(CGImage);
  NSData *imageRGBAData = (id)CFBridgingRelease(CGDataProviderCopyData(provider));
  const uint8_t order[4] = {2, 1, 0, 3};

  NSData *imageBGRAData = nil;
  unsigned char *bgraPixel = (unsigned char *)malloc([imageRGBAData length]);
  if (bgraPixel) {
    vImage_Buffer src;
    src.height = height;
    src.width = width;
    src.rowBytes = bpr;
    src.data = (void *)[imageRGBAData bytes];

    vImage_Buffer dest;
    dest.height = height;
    dest.width = width;
    dest.rowBytes = bpr;
    dest.data = bgraPixel;

    // Specify ordering changes in map.
    vImage_Error error = vImagePermuteChannels_ARGB8888(&src, &dest, order, kvImageNoFlags);

    // Package the result.
    if (error == kvImageNoError) {
      imageBGRAData = [NSData dataWithBytes:bgraPixel length:[imageRGBAData length]];
    }

    // Memory cleanup.
    free(bgraPixel);
  }

  if (imageBGRAData == nil) {
    XCTFail(@"Failed to convert input image.");
  }

  // Write data to `CMSampleBuffer`.
  NSDictionary *options = @{
    (__bridge NSString *)kCVPixelBufferCGImageCompatibilityKey : @(YES),
    (__bridge NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @(YES)
  };
  CVPixelBufferRef pixelBuffer;
  CVReturn status = CVPixelBufferCreateWithBytes(
      kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, (void *)[imageBGRAData bytes],
      bpr, NULL, nil, (__bridge CFDictionaryRef)options, &pixelBuffer);

  if (status != kCVReturnSuccess) {
    XCTFail(@"Failed to create pixel buffer.");
  }

  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  CMVideoFormatDescriptionRef videoInfo = NULL;
  CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);

  CMSampleBufferRef buffer;
  CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo,
                                     &kCMTimingInfoInvalid, &buffer);

  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

  return buffer;
}

@end

NS_ASSUME_NONNULL_END
mediapipe/tasks/ios/vision/core/BUILD

@@ -4,8 +4,12 @@ licenses(["notice"])

objc_library(
    name = "MPPImage",
-    srcs = ["sources/MPPImage.h"],
-    hdrs = ["sources/MPPImage.m"],
+    srcs = ["sources/MPPImage.m"],
+    hdrs = ["sources/MPPImage.h"],
+    copts = [
+        "-ObjC++",
+        "-std=c++17",
+    ],
    module_name = "MPPImage",
    sdk_frameworks = [
        "CoreMedia",
@@ -13,6 +17,7 @@ objc_library(
        "UIKit",
    ],
    deps = [
+        "//mediapipe/tasks/ios/common:MPPCommon",
        "//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
    ],
    copts = [