Merge pull request #4194 from priankakariatyml:ios-image-classifier-tests

PiperOrigin-RevId: 519907148
Copybara-Service 2023-03-27 20:46:18 -07:00
commit 59b3150fff
14 changed files with 923 additions and 32 deletions


@@ -107,18 +107,18 @@ using ::mediapipe::InputStreamInfo;
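// Route every input stream through the flow limiter: the limiter consumes the stripped
// input stream and emits the prefixed task input stream that feeds the task subgraph.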
for (NSString *inputStream in self.inputStreams) {
graphConfig.add_input_stream(inputStream.cppString);
NSString *strippedInputStream = [MPPTaskInfo stripTagIndex:inputStream];
flowLimitCalculatorNode->add_input_stream(strippedInputStream.cppString);
NSString *taskInputStream = [MPPTaskInfo addStreamNamePrefix:inputStream];
taskSubgraphNode->add_input_stream(taskInputStream.cppString);
NSString *strippedInputStream = [MPPTaskInfo stripTagIndex:inputStream];
flowLimitCalculatorNode->add_input_stream(strippedInputStream.cppString);
NSString *strippedTaskInputStream = [MPPTaskInfo stripTagIndex:taskInputStream];
flowLimitCalculatorNode->add_output_stream(strippedTaskInputStream.cppString);
}
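// Feed the first output stream back into the flow limiter as the FINISHED back edge,
// signaling that processing of a frame has completed.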
NSString *firstOutputStream = self.outputStreams[0];
auto finishedOutputStream = "FINISHED:" + firstOutputStream.cppString;
NSString *strippedFirstOutputStream = [MPPTaskInfo stripTagIndex:self.outputStreams[0]];
auto finishedOutputStream = "FINISHED:" + strippedFirstOutputStream.cppString;
flowLimitCalculatorNode->add_input_stream(finishedOutputStream);
return graphConfig;


@@ -20,6 +20,8 @@ NS_ASSUME_NONNULL_BEGIN
@interface MPPBaseOptions (Helpers)
- (void)copyToProto:(mediapipe::tasks::core::proto::BaseOptions *)baseOptionsProto;
- (void)copyToProto:(mediapipe::tasks::core::proto::BaseOptions *)baseOptionsProto
withUseStreamMode:(BOOL)useStreamMode;
@end


@@ -22,6 +22,11 @@ using BaseOptionsProto = ::mediapipe::tasks::core::proto::BaseOptions;
@implementation MPPBaseOptions (Helpers)
- (void)copyToProto:(BaseOptionsProto *)baseOptionsProto withUseStreamMode:(BOOL)useStreamMode {
[self copyToProto:baseOptionsProto];
baseOptionsProto->set_use_stream_mode(useStreamMode);
}
- (void)copyToProto:(BaseOptionsProto *)baseOptionsProto {
baseOptionsProto->Clear();


@@ -0,0 +1,55 @@
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_unit_test")
load(
"//mediapipe/tasks:ios/ios.bzl",
"MPP_TASK_MINIMUM_OS_VERSION",
)
load(
"@org_tensorflow//tensorflow/lite:special_rules.bzl",
"tflite_ios_lab_runner",
)
package(default_visibility = ["//mediapipe/tasks:internal"])
licenses(["notice"])
# Default tags for filtering iOS targets. Targets are restricted to Apple platforms.
TFL_DEFAULT_TAGS = [
"apple",
]
# The following sanitizer tests are not supported by iOS test targets.
TFL_DISABLED_SANITIZER_TAGS = [
"noasan",
"nomsan",
"notsan",
]
objc_library(
name = "MPPImageClassifierObjcTestLibrary",
testonly = 1,
srcs = ["MPPImageClassifierTests.m"],
copts = [
"-ObjC++",
"-std=c++17",
"-x objective-c++",
],
data = [
"//mediapipe/tasks/testdata/vision:test_images",
"//mediapipe/tasks/testdata/vision:test_models",
],
deps = [
"//mediapipe/tasks/ios/common:MPPCommon",
"//mediapipe/tasks/ios/test/vision/utils:MPPImageTestUtils",
"//mediapipe/tasks/ios/vision/image_classifier:MPPImageClassifier",
],
)
ios_unit_test(
name = "MPPImageClassifierObjcTest",
minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION,
runner = tflite_ios_lab_runner("IOS_LATEST"),
tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS,
deps = [
":MPPImageClassifierObjcTestLibrary",
],
)


@@ -0,0 +1,675 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <XCTest/XCTest.h>
#import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
#import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
#import "mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h"
static NSString *const kFloatModelName = @"mobilenet_v2_1.0_224";
static NSString *const kQuantizedModelName = @"mobilenet_v1_0.25_224_quant";
static NSDictionary *const kBurgerImage = @{@"name" : @"burger", @"type" : @"jpg"};
static NSDictionary *const kBurgerRotatedImage = @{@"name" : @"burger_rotated", @"type" : @"jpg"};
static NSDictionary *const kMultiObjectsImage = @{@"name" : @"multi_objects", @"type" : @"jpg"};
static NSDictionary *const kMultiObjectsRotatedImage =
@{@"name" : @"multi_objects_rotated", @"type" : @"jpg"};
static const int kMobileNetCategoriesCount = 1001;
static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
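// Asserts that `error` matches `expectedError` in domain and code, and that its localized
// description contains the expected description.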
#define AssertEqualErrors(error, expectedError) \
XCTAssertNotNil(error); \
XCTAssertEqualObjects(error.domain, expectedError.domain); \
XCTAssertEqual(error.code, expectedError.code); \
XCTAssertNotEqual( \
[error.localizedDescription rangeOfString:expectedError.localizedDescription].location, \
NSNotFound)
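// Asserts that each category matches the expected index, category name, and display name,
// with scores equal to within 1e-3.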
#define AssertEqualCategoryArrays(categories, expectedCategories) \
XCTAssertEqual(categories.count, expectedCategories.count); \
for (int i = 0; i < categories.count; i++) { \
XCTAssertEqual(categories[i].index, expectedCategories[i].index, @"index i = %d", i); \
XCTAssertEqualWithAccuracy(categories[i].score, expectedCategories[i].score, 1e-3, \
@"index i = %d", i); \
XCTAssertEqualObjects(categories[i].categoryName, expectedCategories[i].categoryName, \
@"index i = %d", i); \
XCTAssertEqualObjects(categories[i].displayName, expectedCategories[i].displayName, \
@"index i = %d", i); \
}
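// Asserts that the result is non-nil and contains exactly one classification head with
// head index 0.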
#define AssertImageClassifierResultHasOneHead(imageClassifierResult) \
XCTAssertNotNil(imageClassifierResult); \
XCTAssertNotNil(imageClassifierResult.classificationResult); \
XCTAssertEqual(imageClassifierResult.classificationResult.classifications.count, 1); \
XCTAssertEqual(imageClassifierResult.classificationResult.classifications[0].headIndex, 0);
@interface MPPImageClassifierTests : XCTestCase
@end
@implementation MPPImageClassifierTests
#pragma mark Results
+ (NSArray<MPPCategory *> *)expectedResultCategoriesForClassifyBurgerImageWithFloatModel {
return @[
[[MPPCategory alloc] initWithIndex:934
score:0.786005f
categoryName:@"cheeseburger"
displayName:nil],
[[MPPCategory alloc] initWithIndex:932 score:0.023508f categoryName:@"bagel" displayName:nil],
[[MPPCategory alloc] initWithIndex:925
score:0.021172f
categoryName:@"guacamole"
displayName:nil]
];
}
#pragma mark File
- (NSString *)filePathWithName:(NSString *)fileName extension:(NSString *)extension {
NSString *filePath = [[NSBundle bundleForClass:self.class] pathForResource:fileName
ofType:extension];
return filePath;
}
#pragma mark Classifier Initializers
- (MPPImageClassifierOptions *)imageClassifierOptionsWithModelName:(NSString *)modelName {
NSString *modelPath = [self filePathWithName:modelName extension:@"tflite"];
MPPImageClassifierOptions *imageClassifierOptions = [[MPPImageClassifierOptions alloc] init];
imageClassifierOptions.baseOptions.modelAssetPath = modelPath;
return imageClassifierOptions;
}
- (MPPImageClassifier *)imageClassifierFromModelFileWithName:(NSString *)modelName {
NSString *modelPath = [self filePathWithName:modelName extension:@"tflite"];
MPPImageClassifier *imageClassifier = [[MPPImageClassifier alloc] initWithModelPath:modelPath
error:nil];
XCTAssertNotNil(imageClassifier);
return imageClassifier;
}
- (MPPImageClassifier *)imageClassifierWithOptionsSucceeds:
(MPPImageClassifierOptions *)imageClassifierOptions {
MPPImageClassifier *imageClassifier =
[[MPPImageClassifier alloc] initWithOptions:imageClassifierOptions error:nil];
XCTAssertNotNil(imageClassifier);
return imageClassifier;
}
#pragma mark Assert Classify Results
- (MPPImage *)imageWithFileInfo:(NSDictionary *)fileInfo {
MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPImageClassifierTests class]
fileName:fileInfo[@"name"]
ofType:fileInfo[@"type"]];
XCTAssertNotNil(image);
return image;
}
- (MPPImage *)imageWithFileInfo:(NSDictionary *)fileInfo
orientation:(UIImageOrientation)orientation {
MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPImageClassifierTests class]
fileName:fileInfo[@"name"]
ofType:fileInfo[@"type"]
orientation:orientation];
XCTAssertNotNil(image);
return image;
}
- (void)assertCreateImageClassifierWithOptions:(MPPImageClassifierOptions *)imageClassifierOptions
failsWithExpectedError:(NSError *)expectedError {
NSError *error = nil;
MPPImageClassifier *imageClassifier =
[[MPPImageClassifier alloc] initWithOptions:imageClassifierOptions error:&error];
XCTAssertNil(imageClassifier);
AssertEqualErrors(error, expectedError);
}
- (void)assertImageClassifierResult:(MPPImageClassifierResult *)imageClassifierResult
hasExpectedCategoriesCount:(NSInteger)expectedCategoriesCount
expectedCategories:(NSArray<MPPCategory *> *)expectedCategories {
AssertImageClassifierResultHasOneHead(imageClassifierResult);
NSArray<MPPCategory *> *resultCategories =
imageClassifierResult.classificationResult.classifications[0].categories;
XCTAssertEqual(resultCategories.count, expectedCategoriesCount);
NSArray<MPPCategory *> *categorySubsetToCompare;
if (resultCategories.count > expectedCategories.count) {
categorySubsetToCompare =
[resultCategories subarrayWithRange:NSMakeRange(0, expectedCategories.count)];
} else {
categorySubsetToCompare = resultCategories;
}
AssertEqualCategoryArrays(categorySubsetToCompare, expectedCategories);
}
- (void)assertResultsOfClassifyImage:(MPPImage *)mppImage
usingImageClassifier:(MPPImageClassifier *)imageClassifier
expectedCategoriesCount:(NSInteger)expectedCategoriesCount
equalsCategories:(NSArray<MPPCategory *> *)expectedCategories {
MPPImageClassifierResult *imageClassifierResult = [imageClassifier classifyImage:mppImage
error:nil];
[self assertImageClassifierResult:imageClassifierResult
hasExpectedCategoriesCount:expectedCategoriesCount
expectedCategories:expectedCategories];
}
- (void)assertResultsOfClassifyImageWithFileInfo:(NSDictionary *)fileInfo
usingImageClassifier:(MPPImageClassifier *)imageClassifier
expectedCategoriesCount:(NSInteger)expectedCategoriesCount
equalsCategories:(NSArray<MPPCategory *> *)expectedCategories {
MPPImage *mppImage = [self imageWithFileInfo:fileInfo];
[self assertResultsOfClassifyImage:mppImage
usingImageClassifier:imageClassifier
expectedCategoriesCount:expectedCategoriesCount
equalsCategories:expectedCategories];
}
#pragma mark General Tests
- (void)testCreateImageClassifierWithMissingModelPathFails {
NSString *modelPath = [self filePathWithName:@"" extension:@""];
NSError *error = nil;
MPPImageClassifier *imageClassifier = [[MPPImageClassifier alloc] initWithModelPath:modelPath
error:&error];
XCTAssertNil(imageClassifier);
NSError *expectedError = [NSError
errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"INVALID_ARGUMENT: ExternalFile must specify at least one of 'file_content', "
@"'file_name', 'file_pointer_meta' or 'file_descriptor_meta'."
}];
AssertEqualErrors(error, expectedError);
}
- (void)testCreateImageClassifierAllowlistAndDenylistFails {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.categoryAllowlist = @[ @"cheeseburger" ];
options.categoryDenylist = @[ @"bagel" ];
[self assertCreateImageClassifierWithOptions:options
failsWithExpectedError:
[NSError
errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"INVALID_ARGUMENT: `category_allowlist` and "
@"`category_denylist` are mutually exclusive options."
}]];
}
- (void)testClassifyWithModelPathAndFloatModelSucceeds {
MPPImageClassifier *imageClassifier = [self imageClassifierFromModelFileWithName:kFloatModelName];
[self
assertResultsOfClassifyImageWithFileInfo:kBurgerImage
usingImageClassifier:imageClassifier
expectedCategoriesCount:kMobileNetCategoriesCount
equalsCategories:
[MPPImageClassifierTests
expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
}
- (void)testClassifyWithOptionsAndFloatModelSucceeds {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
const NSInteger maxResults = 3;
options.maxResults = maxResults;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
[self
assertResultsOfClassifyImageWithFileInfo:kBurgerImage
usingImageClassifier:imageClassifier
expectedCategoriesCount:maxResults
equalsCategories:
[MPPImageClassifierTests
expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
}
- (void)testClassifyWithQuantizedModelSucceeds {
MPPImageClassifierOptions *options =
[self imageClassifierOptionsWithModelName:kQuantizedModelName];
const NSInteger maxResults = 1;
options.maxResults = maxResults;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
NSArray<MPPCategory *> *expectedCategories = @[ [[MPPCategory alloc] initWithIndex:934
score:0.972656f
categoryName:@"cheeseburger"
displayName:nil] ];
[self assertResultsOfClassifyImageWithFileInfo:kBurgerImage
usingImageClassifier:imageClassifier
expectedCategoriesCount:maxResults
equalsCategories:expectedCategories];
}
- (void)testClassifyWithScoreThresholdSucceeds {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.scoreThreshold = 0.25f;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
NSArray<MPPCategory *> *expectedCategories = @[ [[MPPCategory alloc] initWithIndex:934
score:0.786005f
categoryName:@"cheeseburger"
displayName:nil] ];
[self assertResultsOfClassifyImageWithFileInfo:kBurgerImage
usingImageClassifier:imageClassifier
expectedCategoriesCount:expectedCategories.count
equalsCategories:expectedCategories];
}
- (void)testClassifyWithAllowlistSucceeds {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.categoryAllowlist = @[ @"cheeseburger", @"guacamole", @"meat loaf" ];
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
NSArray<MPPCategory *> *expectedCategories = @[
[[MPPCategory alloc] initWithIndex:934
score:0.786005f
categoryName:@"cheeseburger"
displayName:nil],
[[MPPCategory alloc] initWithIndex:925
score:0.021172f
categoryName:@"guacamole"
displayName:nil],
[[MPPCategory alloc] initWithIndex:963
score:0.006279315f
categoryName:@"meat loaf"
displayName:nil],
];
[self assertResultsOfClassifyImageWithFileInfo:kBurgerImage
usingImageClassifier:imageClassifier
expectedCategoriesCount:expectedCategories.count
equalsCategories:expectedCategories];
}
- (void)testClassifyWithDenylistSucceeds {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.categoryDenylist = @[
@"bagel",
];
options.maxResults = 3;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
NSArray<MPPCategory *> *expectedCategories = @[
[[MPPCategory alloc] initWithIndex:934
score:0.786005f
categoryName:@"cheeseburger"
displayName:nil],
[[MPPCategory alloc] initWithIndex:925
score:0.021172f
categoryName:@"guacamole"
displayName:nil],
[[MPPCategory alloc] initWithIndex:963
score:0.006279315f
categoryName:@"meat loaf"
displayName:nil],
];
[self assertResultsOfClassifyImageWithFileInfo:kBurgerImage
usingImageClassifier:imageClassifier
expectedCategoriesCount:expectedCategories.count
equalsCategories:expectedCategories];
}
- (void)testClassifyWithRegionOfInterestSucceeds {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
NSInteger maxResults = 1;
options.maxResults = maxResults;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
NSArray<MPPCategory *> *expectedCategories = @[ [[MPPCategory alloc] initWithIndex:806
score:0.997122f
categoryName:@"soccer ball"
displayName:nil] ];
MPPImage *image = [self imageWithFileInfo:kMultiObjectsImage];
// Region of interest around the soccer ball.
MPPImageClassifierResult *imageClassifierResult =
[imageClassifier classifyImage:image
regionOfInterest:CGRectMake(0.450f, 0.308f, 0.164f, 0.426f)
error:nil];
[self assertImageClassifierResult:imageClassifierResult
hasExpectedCategoriesCount:maxResults
expectedCategories:expectedCategories];
}
- (void)testClassifyWithOrientationSucceeds {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
NSInteger maxResults = 3;
options.maxResults = maxResults;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
NSArray<MPPCategory *> *expectedCategories = @[
[[MPPCategory alloc] initWithIndex:934
score:0.622074f
categoryName:@"cheeseburger"
displayName:nil],
[[MPPCategory alloc] initWithIndex:963
score:0.051214f
categoryName:@"meat loaf"
displayName:nil],
[[MPPCategory alloc] initWithIndex:925
score:0.048719f
categoryName:@"guacamole"
displayName:nil]
];
MPPImage *image = [self imageWithFileInfo:kBurgerRotatedImage
orientation:UIImageOrientationRight];
[self assertResultsOfClassifyImage:image
usingImageClassifier:imageClassifier
expectedCategoriesCount:maxResults
equalsCategories:expectedCategories];
}
- (void)testClassifyWithRegionOfInterestAndOrientationSucceeds {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
NSInteger maxResults = 1;
options.maxResults = maxResults;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
NSArray<MPPCategory *> *expectedCategories =
@[ [[MPPCategory alloc] initWithIndex:560
score:0.682305f
categoryName:@"folding chair"
displayName:nil] ];
MPPImage *image = [self imageWithFileInfo:kMultiObjectsRotatedImage
orientation:UIImageOrientationRight];
// Region of interest around the folding chair.
MPPImageClassifierResult *imageClassifierResult =
[imageClassifier classifyImage:image
regionOfInterest:CGRectMake(0.0f, 0.1763f, 0.5642f, 0.1286f)
error:nil];
[self assertImageClassifierResult:imageClassifierResult
hasExpectedCategoriesCount:maxResults
expectedCategories:expectedCategories];
}
#pragma mark Running Mode Tests
- (void)testCreateImageClassifierFailsWithResultListenerInNonLiveStreamMode {
MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo};
for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.runningMode = runningModesToTest[i];
options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
};
[self
assertCreateImageClassifierWithOptions:options
failsWithExpectedError:
[NSError
errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in image or video mode, a "
@"user-defined result callback should not be provided."
}]];
}
}
- (void)testCreateImageClassifierFailsWithMissingResultListenerInLiveStreamMode {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.runningMode = MPPRunningModeLiveStream;
[self assertCreateImageClassifierWithOptions:options
failsWithExpectedError:
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"The vision task is in live stream mode, a "
@"user-defined result callback must be provided."
}]];
}
- (void)testClassifyFailsWithCallingWrongApiInImageMode {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
MPPImage *image = [self imageWithFileInfo:kBurgerImage];
NSError *liveStreamApiCallError;
XCTAssertFalse([imageClassifier classifyAsyncImage:image
timestampMs:0
error:&liveStreamApiCallError]);
NSError *expectedLiveStreamApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with live "
@"stream mode. Current Running Mode: Image"
}];
AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
NSError *videoApiCallError;
XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
NSError *expectedVideoApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with "
@"video mode. Current Running Mode: Image"
}];
AssertEqualErrors(videoApiCallError, expectedVideoApiCallError);
}
- (void)testClassifyFailsWithCallingWrongApiInVideoMode {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.runningMode = MPPRunningModeVideo;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
MPPImage *image = [self imageWithFileInfo:kBurgerImage];
NSError *liveStreamApiCallError;
XCTAssertFalse([imageClassifier classifyAsyncImage:image
timestampMs:0
error:&liveStreamApiCallError]);
NSError *expectedLiveStreamApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with live "
@"stream mode. Current Running Mode: Video"
}];
AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
NSError *imageApiCallError;
XCTAssertFalse([imageClassifier classifyImage:image error:&imageApiCallError]);
NSError *expectedImageApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with "
@"image mode. Current Running Mode: Video"
}];
AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
}
- (void)testClassifyFailsWithCallingWrongApiInLiveStreamMode {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.runningMode = MPPRunningModeLiveStream;
options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
};
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
MPPImage *image = [self imageWithFileInfo:kBurgerImage];
NSError *imageApiCallError;
XCTAssertFalse([imageClassifier classifyImage:image error:&imageApiCallError]);
NSError *expectedImageApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with "
@"image mode. Current Running Mode: Live Stream"
}];
AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
NSError *videoApiCallError;
XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
NSError *expectedVideoApiCallError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey : @"The vision task is not initialized with "
@"video mode. Current Running Mode: Live Stream"
}];
AssertEqualErrors(videoApiCallError, expectedVideoApiCallError);
}
- (void)testClassifyWithVideoModeSucceeds {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
options.runningMode = MPPRunningModeVideo;
NSInteger maxResults = 3;
options.maxResults = maxResults;
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
MPPImage *image = [self imageWithFileInfo:kBurgerImage];
for (int i = 0; i < 3; i++) {
MPPImageClassifierResult *imageClassifierResult = [imageClassifier classifyVideoFrame:image
timestampMs:i
error:nil];
[self assertImageClassifierResult:imageClassifierResult
hasExpectedCategoriesCount:maxResults
expectedCategories:
[MPPImageClassifierTests
expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
}
}
- (void)testClassifyWithOutOfOrderTimestampsAndLiveStreamModeFails {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
NSInteger maxResults = 3;
options.maxResults = maxResults;
options.runningMode = MPPRunningModeLiveStream;
options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
[self assertImageClassifierResult:result
hasExpectedCategoriesCount:maxResults
expectedCategories:
[MPPImageClassifierTests
expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
};
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
MPPImage *image = [self imageWithFileInfo:kBurgerImage];
XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:1 error:nil]);
NSError *error;
XCTAssertFalse([imageClassifier classifyAsyncImage:image timestampMs:0 error:&error]);
NSError *expectedError =
[NSError errorWithDomain:kExpectedErrorDomain
code:MPPTasksErrorCodeInvalidArgumentError
userInfo:@{
NSLocalizedDescriptionKey :
@"INVALID_ARGUMENT: Input timestamp must be monotonically increasing."
}];
AssertEqualErrors(error, expectedError);
}
- (void)testClassifyWithLiveStreamModeSucceeds {
MPPImageClassifierOptions *options = [self imageClassifierOptionsWithModelName:kFloatModelName];
NSInteger maxResults = 3;
options.maxResults = maxResults;
options.runningMode = MPPRunningModeLiveStream;
options.completion = ^(MPPImageClassifierResult *result, NSError *error) {
[self assertImageClassifierResult:result
hasExpectedCategoriesCount:maxResults
expectedCategories:
[MPPImageClassifierTests
expectedResultCategoriesForClassifyBurgerImageWithFloatModel]];
};
MPPImageClassifier *imageClassifier = [self imageClassifierWithOptionsSucceeds:options];
// TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used
// with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type
// `CMSampleBuffer`.
MPPImage *image = [self imageWithFileInfo:kBurgerImage];
for (int i = 0; i < 3; i++) {
XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:i error:nil]);
}
}
@end


@@ -0,0 +1,13 @@
package(default_visibility = ["//mediapipe/tasks:internal"])
licenses(["notice"])
objc_library(
name = "MPPImageTestUtils",
srcs = ["sources/MPPImage+TestUtils.m"],
hdrs = ["sources/MPPImage+TestUtils.h"],
module_name = "MPPImageTestUtils",
deps = [
"//mediapipe/tasks/ios/vision/core:MPPImage",
],
)


@@ -0,0 +1,63 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
NS_ASSUME_NONNULL_BEGIN
/**
* Helper utility for initializing `MPPImage` for MediaPipe iOS vision library tests.
*/
@interface MPPImage (TestUtils)
/**
* Loads an image from a file in an app bundle into an `MPPImage` object.
*
* @param classObject The class whose bundle contains the file to be loaded.
* @param name Name of the image file.
* @param type Extension of the image file.
*
* @return An `MPPImage` object containing the loaded image, or `nil` if the image could not be
* loaded.
*/
+ (nullable MPPImage *)imageFromBundleWithClass:(Class)classObject
fileName:(NSString *)name
ofType:(NSString *)type
NS_SWIFT_NAME(imageFromBundle(class:filename:type:));
/**
* Loads an image from a file in an app bundle into an `MPPImage` object with the specified
* orientation.
*
* @param classObject The class whose bundle contains the file to be loaded.
* @param name Name of the image file.
* @param type Extension of the image file.
* @param orientation Orientation of the image.
*
* @return An `MPPImage` object containing the loaded image, or `nil` if the image could not be
* loaded.
*/
+ (nullable MPPImage *)imageFromBundleWithClass:(Class)classObject
fileName:(NSString *)name
ofType:(NSString *)type
orientation:(UIImageOrientation)imageOrientation
NS_SWIFT_NAME(imageFromBundle(class:filename:type:orientation:));
@end
NS_ASSUME_NONNULL_END
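A minimal usage sketch of these loaders, mirroring how `MPPImageClassifierTests` above loads its fixtures (assumes `burger.jpg` is bundled with the test target):

// Load a bundled test image; the loader returns nil if the file cannot be found or decoded.
MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPImageClassifierTests class]
                                            fileName:@"burger"
                                              ofType:@"jpg"];
XCTAssertNotNil(image);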


@@ -0,0 +1,57 @@
// Copyright 2023 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
@interface UIImage (FileUtils)
+ (nullable UIImage *)imageFromBundleWithClass:(Class)classObject
fileName:(NSString *)name
ofType:(NSString *)type;
@end
@implementation UIImage (FileUtils)
+ (nullable UIImage *)imageFromBundleWithClass:(Class)classObject
fileName:(NSString *)name
ofType:(NSString *)type {
NSString *imagePath = [[NSBundle bundleForClass:classObject] pathForResource:name ofType:type];
if (!imagePath) return nil;
return [[UIImage alloc] initWithContentsOfFile:imagePath];
}
@end
@implementation MPPImage (TestUtils)
+ (nullable MPPImage *)imageFromBundleWithClass:(Class)classObject
fileName:(NSString *)name
ofType:(NSString *)type {
UIImage *image = [UIImage imageFromBundleWithClass:classObject fileName:name ofType:type];
return [[MPPImage alloc] initWithUIImage:image error:nil];
}
+ (nullable MPPImage *)imageFromBundleWithClass:(Class)classObject
fileName:(NSString *)name
ofType:(NSString *)type
orientation:(UIImageOrientation)imageOrientation {
UIImage *image = [UIImage imageFromBundleWithClass:classObject fileName:name ofType:type];
return [[MPPImage alloc] initWithUIImage:image orientation:imageOrientation error:nil];
}
@end


@@ -54,11 +54,13 @@ objc_library(
],
deps = [
":MPPRunningMode",
"//mediapipe/calculators/core:flow_limiter_calculator",
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/tasks/ios/common:MPPCommon",
"//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
"//mediapipe/tasks/ios/core:MPPTaskRunner",
"//third_party/apple_frameworks:UIKit",
"@com_google_absl//absl/status:statusor",
"@ios_opencv//:OpencvFramework",
],
)


@@ -44,9 +44,9 @@ NS_INLINE NSString *MPPRunningModeDisplayName(MPPRunningMode runningMode) {
}
NSString *displayNameMap[MPPRunningModeLiveStream + 1] = {
[MPPRunningModeImage] = @"#MPPRunningModeImage",
[MPPRunningModeVideo] = @ "#MPPRunningModeVideo",
[MPPRunningModeLiveStream] = @ "#MPPRunningModeLiveStream"};
[MPPRunningModeImage] = @"Image",
[MPPRunningModeVideo] = @"Video",
[MPPRunningModeLiveStream] = @"Live Stream"};
return displayNameMap[runningMode];
}


@@ -97,7 +97,7 @@ static const NSInteger kMPPOrientationDegreesLeft = -270;
return std::nullopt;
}
CGRect calculatedRoi = CGRectEqualToRect(roi, CGRectZero) ? roi : CGRectMake(0.0, 0.0, 1.0, 1.0);
CGRect calculatedRoi = CGRectEqualToRect(roi, CGRectZero) ? CGRectMake(0.0, 0.0, 1.0, 1.0) : roi;
NormalizedRect normalizedRect;
normalizedRect.set_x_center(CGRectGetMidX(calculatedRoi));


@@ -131,7 +131,9 @@ using ::mediapipe::ImageFrame;
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
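// The destination buffer holds tightly packed RGB, so its stride is 3 * width rather than
// the source pixel buffer's bytes-per-row.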
size_t destinationChannelCount = 3;
size_t destinationStride = destinationChannelCount * width;
uint8_t *rgbPixelData = [MPPPixelDataUtils
rgbPixelDataFromPixelData:(uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer)
@@ -147,9 +149,10 @@ using ::mediapipe::ImageFrame;
return nullptr;
}
std::unique_ptr<ImageFrame> imageFrame = absl::make_unique<ImageFrame>(
::mediapipe::ImageFormat::SRGB, width, height, stride, static_cast<uint8 *>(rgbPixelData),
/*deleter=*/free);
std::unique_ptr<ImageFrame> imageFrame =
absl::make_unique<ImageFrame>(::mediapipe::ImageFormat::SRGB, width, height,
destinationStride, static_cast<uint8 *>(rgbPixelData),
/*deleter=*/free);
return imageFrame;
}
@@ -183,11 +186,14 @@ using ::mediapipe::ImageFrame;
NSInteger bitsPerComponent = 8;
NSInteger channelCount = 4;
size_t bytesPerRow = channelCount * width;
NSInteger destinationChannelCount = 3;
size_t destinationBytesPerRow = destinationChannelCount * width;
UInt8 *pixelDataToReturn = NULL;
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
size_t bytesPerRow = channelCount * width;
// iOS infers bytesPerRow if it is set to 0.
// See https://developer.apple.com/documentation/coregraphics/1455939-cgbitmapcontextcreate
// But for the segmentation test image, this was not the case.
@@ -219,10 +225,14 @@ using ::mediapipe::ImageFrame;
CGColorSpaceRelease(colorSpace);
std::unique_ptr<ImageFrame> imageFrame =
absl::make_unique<ImageFrame>(mediapipe::ImageFormat::SRGB, (int)width, (int)height,
(int)bytesPerRow, static_cast<uint8 *>(pixelDataToReturn),
/*deleter=*/free);
if (!pixelDataToReturn) {
return nullptr;
}
std::unique_ptr<ImageFrame> imageFrame = absl::make_unique<ImageFrame>(
mediapipe::ImageFormat::SRGB, (int)width, (int)height, (int)destinationBytesPerRow,
static_cast<uint8 *>(pixelDataToReturn),
/*deleter=*/free);
return imageFrame;
}


@@ -36,15 +36,17 @@ static NSString *const kClassificationsTag = @"CLASSIFICATIONS";
static NSString *const kImageInStreamName = @"image_in";
static NSString *const kImageOutStreamName = @"image_out";
static NSString *const kImageTag = @"IMAGE";
static NSString *const kNormRectName = @"norm_rect_in";
static NSString *const kNormRectStreamName = @"norm_rect_in";
static NSString *const kNormRectTag = @"NORM_RECT";
static NSString *const kTaskGraphName =
@"mediapipe.tasks.vision.image_classifier.ImageClassifierGraph";
#define InputPacketMap(imagePacket, normalizedRectPacket) \
{ \
{kImageInStreamName.cppString, imagePacket}, { kNormRectName.cppString, normalizedRectPacket } \
#define InputPacketMap(imagePacket, normalizedRectPacket) \
{ \
{kImageInStreamName.cppString, imagePacket}, { \
kNormRectStreamName.cppString, normalizedRectPacket \
} \
}
@interface MPPImageClassifier () {
@@ -60,12 +62,17 @@ static NSString *const kTaskGraphName =
if (self) {
MPPTaskInfo *taskInfo = [[MPPTaskInfo alloc]
initWithTaskGraphName:kTaskGraphName
inputStreams:@[ [NSString
stringWithFormat:@"%@:%@", kImageTag, kImageInStreamName] ]
outputStreams:@[ [NSString stringWithFormat:@"%@:%@", kClassificationsTag,
kClassificationsStreamName] ]
inputStreams:@[
[NSString stringWithFormat:@"%@:%@", kImageTag, kImageInStreamName],
[NSString stringWithFormat:@"%@:%@", kNormRectTag, kNormRectStreamName]
]
outputStreams:@[
[NSString
stringWithFormat:@"%@:%@", kClassificationsTag, kClassificationsStreamName],
[NSString stringWithFormat:@"%@:%@", kImageTag, kImageOutStreamName]
]
taskOptions:options
enableFlowLimiting:NO
enableFlowLimiting:options.runningMode == MPPRunningModeLiveStream
error:error];
if (!taskInfo) {
@@ -130,8 +137,8 @@ static NSString *const kTaskGraphName =
PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processPacketMap:inputPacketMap
error:error];
std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
error:error];
if (!outputPacketMap.has_value()) {
return nil;
}


@@ -21,7 +21,7 @@
#include "mediapipe/tasks/cc/vision/image_classifier/proto/image_classifier_graph_options.pb.h"
namespace {
using CalculatorOptionsProto = ::mediapipe::CalculatorOptions;
using CalculatorOptionsProto = mediapipe::CalculatorOptions;
using ImageClassifierGraphOptionsProto =
::mediapipe::tasks::vision::image_classifier::proto::ImageClassifierGraphOptions;
using ClassifierOptionsProto = ::mediapipe::tasks::components::processors::proto::ClassifierOptions;
@@ -32,7 +32,9 @@ using ClassifierOptionsProto = ::mediapipe::tasks::components::processors::proto
- (void)copyToProto:(CalculatorOptionsProto *)optionsProto {
ImageClassifierGraphOptionsProto *graphOptions =
optionsProto->MutableExtension(ImageClassifierGraphOptionsProto::ext);
[self.baseOptions copyToProto:graphOptions->mutable_base_options()];
[self.baseOptions copyToProto:graphOptions->mutable_base_options()
withUseStreamMode:self.runningMode != MPPRunningModeImage];
ClassifierOptionsProto *classifierOptionsProto = graphOptions->mutable_classifier_options();
classifierOptionsProto->Clear();