Removed ROI APIs from iOS object detector

Prianka Liz Kariat 2023-04-10 19:24:58 +05:30
parent 0fd60285b5
commit adfe47d456
2 changed files with 30 additions and 105 deletions

MPPObjectDetector.h

@@ -109,28 +109,6 @@ NS_SWIFT_NAME(ObjectDetector)
error:(NSError **)error
NS_SWIFT_NAME(detect(image:));
/**
* Performs object detection on the provided `MPPImage` cropped to the specified region of
* interest. Rotation will be applied on the cropped image according to the `orientation` property
* of the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeImage`.
*
* @param image The `MPPImage` on which object detection is to be performed.
* @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which
* object detection should be performed.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input image.
*
* @return An `MPPObjectDetectionResult` object that contains a list of detections. Each detection
* has a bounding box expressed in the coordinate system of the unrotated input frame, i.e. in
* `[0, image_width) x [0, image_height)`, which are the dimensions of the underlying image data.
*/
- (nullable MPPObjectDetectionResult *)detectInImage:(MPPImage *)image
                                    regionOfInterest:(CGRect)roi
                                               error:(NSError **)error
    NS_SWIFT_NAME(detect(image:regionOfInterest:));
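With this removal, image-mode callers drop the `regionOfInterest:` argument and use the retained whole-image method. A minimal migration sketch at a hypothetical call site (`detector` and `image` are assumed to exist):

    NSError *error = nil;

    // Before this commit (removed): detection restricted to a CGRect ROI.
    // MPPObjectDetectionResult *result = [detector detectInImage:image
    //                                           regionOfInterest:CGRectMake(0, 0, 200, 200)
    //                                                      error:&error];

    // After this commit: whole-image detection only.
    MPPObjectDetectionResult *result = [detector detectInImage:image error:&error];
    if (!result) {
      NSLog(@"Object detection failed: %@", error);
    }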
/**
* Performs object detection on the provided video frame of type `MPPImage` using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
@@ -153,36 +131,6 @@ NS_SWIFT_NAME(ObjectDetector)
error:(NSError **)error
NS_SWIFT_NAME(detect(videoFrame:timestampMs:));
/**
* Performs object detection on the provided video frame of type `MPPImage` cropped to the
* specified region of interest. Rotation will be applied according to the `orientation` property of
* the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeVideo`.
*
* It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must
* be monotonically increasing.
*
* @param image The video frame of type `MPPImage` on which object detection is to be
* performed.
* @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
* monotonically increasing.
* @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which
* object detection should be performed.
*
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input image.
*
* @return An `MPPObjectDetectionResult` object that contains a list of detections. Each detection
* has a bounding box expressed in the coordinate system of the unrotated input frame, i.e. in
* `[0, image_width) x [0, image_height)`, which are the dimensions of the underlying image data.
*/
- (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
                                              timestampMs:(NSInteger)timestampMs
                                         regionOfInterest:(CGRect)roi
                                                    error:(NSError **)error
    NS_SWIFT_NAME(detect(videoFrame:timestampMs:regionOfInterest:));
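The video-mode migration has the same shape. A sketch, assuming a detector created with `MPPRunningModeVideo` and a hypothetical `frames` array delivered at roughly 30 fps (per the docs above, timestamps must be monotonically increasing):

    NSError *error = nil;
    for (NSUInteger i = 0; i < frames.count; i++) {
      // The removed variant took an extra regionOfInterest:(CGRect) argument.
      MPPObjectDetectionResult *result =
          [detector detectInVideoFrame:frames[i]
                           timestampMs:(NSInteger)(i * 33)  // ~30 fps, strictly increasing
                                 error:&error];
      if (!result) {
        NSLog(@"Detection failed on frame %lu: %@", (unsigned long)i, error);
      }
    }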
/**
* Sends live stream image data of type `MPPImage` to perform object detection using the whole
* image as region of interest. Rotation will be applied according to the `orientation` property of
@@ -206,33 +154,6 @@ NS_SWIFT_NAME(ObjectDetector)
timestampMs:(NSInteger)timestampMs
error:(NSError **)error NS_SWIFT_NAME(detectAsync(image:timestampMs:));
/**
* Sends live stream image data of type `MPPImage` to perform object detection, cropped to the
* specified region of interest. Rotation will be applied according to the `orientation` property
* of the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with
* `MPPRunningModeLiveStream`. Results are provided asynchronously via the `completion` callback
* provided in the `MPPObjectDetectorOptions`.
*
* It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
* to the object detector. The input timestamps must be monotonically increasing.
*
* @param image Live stream image data of type `MPPImage` on which object detection is to be
* performed.
* @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
* to the object detector. The input timestamps must be monotonically increasing.
* @param roi A `CGRect` specifying the region of interest within the given live stream image data
* of type `MPPImage`, on which object detection should be performed.
* @param error An optional error parameter populated when there is an error in performing object
* detection on the input live stream image data.
*
* @return `YES` if the image was sent to the task successfully, otherwise `NO`.
*/
- (BOOL)detectAsyncInImage:(MPPImage *)image
               timestampMs:(NSInteger)timestampMs
          regionOfInterest:(CGRect)roi
                     error:(NSError **)error
    NS_SWIFT_NAME(detectAsync(image:timestampMs:regionOfInterest:));
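In live-stream mode the surviving entry point is `detectAsyncInImage:timestampMs:error:`. As the docs above note, the `BOOL` return only reports whether the frame was accepted by the task; results still arrive through the `completion` callback configured on `MPPObjectDetectorOptions`. A sketch of a hypothetical per-frame send:

    NSError *error = nil;
    // Results are delivered asynchronously via the options' completion callback,
    // not through this return value.
    if (![detector detectAsyncInImage:image timestampMs:frameTimestampMs error:&error]) {
      NSLog(@"Frame at %ld ms was not sent to the task: %@", (long)frameTimestampMs, error);
    }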
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;

MPPObjectDetector.mm

@@ -158,12 +158,11 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
timestampMs:(NSInteger)timestampMs
regionOfInterest:(CGRect)roi
error:(NSError **)error {
std::optional<NormalizedRect> rect =
[_visionTaskRunner normalizedRectFromRegionOfInterest:roi
[_visionTaskRunner normalizedRectFromRegionOfInterest:CGRectZero
imageOrientation:image.orientation
ROIAllowed:YES
ROIAllowed:NO
error:error];
if (!rect.has_value()) {
return std::nullopt;
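With `CGRectZero` and `ROIAllowed:NO`, the vision task runner presumably falls back to a `NormalizedRect` covering the whole image. A sketch of the assumed equivalent in terms of the MediaPipe `NormalizedRect` proto fields (the real construction, including how `image.orientation` becomes a rotation, lives in `MPPVisionTaskRunner`):

    // Assumed full-image fallback for CGRectZero: centered, full extent.
    mediapipe::NormalizedRect normRect;
    normRect.set_x_center(0.5f);
    normRect.set_y_center(0.5f);
    normRect.set_width(1.0f);
    normRect.set_height(1.0f);
    // rotationRadians is hypothetical here, derived from image.orientation.
    normRect.set_rotation(rotationRadians);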
@@ -184,16 +183,41 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
}
- (nullable MPPObjectDetectionResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
return [self detectInImage:image regionOfInterest:CGRectZero error:error];
std::optional<NormalizedRect> rect =
[_visionTaskRunner normalizedRectFromRegionOfInterest:CGRectZero
imageOrientation:image.orientation
ROIAllowed:YES
error:error];
if (!rect.has_value()) {
return nil;
}
Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
if (imagePacket.IsEmpty()) {
return nil;
}
Packet normalizedRectPacket =
[MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
error:error];
if (!outputPacketMap.has_value()) {
return nil;
}
return [MPPObjectDetectionResult
objectDetectionResultWithDetectionsPacket:outputPacketMap
.value()[kDetectionsStreamName.cppString]];
}
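`InputPacketMap` above is presumably a small local helper that keys the two packets by the graph's input stream names. A sketch under that assumption (`kImageInStreamName` and `kNormRectStreamName` are hypothetical stand-ins for the constants defined elsewhere in this file):

    // Hypothetical shape of the helper used by detectInImage:error: above.
    static PacketMap InputPacketMap(const Packet &imagePacket,
                                    const Packet &normalizedRectPacket) {
      PacketMap inputPacketMap;
      inputPacketMap[kImageInStreamName.cppString] = imagePacket;
      inputPacketMap[kNormRectStreamName.cppString] = normalizedRectPacket;
      return inputPacketMap;
    }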
- (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
timestampMs:(NSInteger)timestampMs
regionOfInterest:(CGRect)roi
error:(NSError **)error {
std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
timestampMs:timestampMs
regionOfInterest:roi
error:error];
if (!inputPacketMap.has_value()) {
return nil;
@@ -211,22 +235,11 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
.value()[kDetectionsStreamName.cppString]];
}
- (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
timestampMs:(NSInteger)timestampMs
error:(NSError **)error {
return [self detectInVideoFrame:image
timestampMs:timestampMs
regionOfInterest:CGRectZero
error:error];
}
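With the forwarding wrapper above deleted, the video path collapses into a single method. Reconstructed from the surviving lines, the consolidated declaration presumably reads:

    - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
                                                  timestampMs:(NSInteger)timestampMs
                                                        error:(NSError **)error
        NS_SWIFT_NAME(detect(videoFrame:timestampMs:));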
- (BOOL)detectAsyncInImage:(MPPImage *)image
timestampMs:(NSInteger)timestampMs
regionOfInterest:(CGRect)roi
error:(NSError **)error {
std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
timestampMs:timestampMs
regionOfInterest:roi
error:error];
if (!inputPacketMap.has_value()) {
return NO;
@@ -235,13 +248,4 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
}
- (BOOL)detectAsyncInImage:(MPPImage *)image
timestampMs:(NSInteger)timestampMs
error:(NSError **)error {
return [self detectAsyncInImage:image
timestampMs:timestampMs
regionOfInterest:CGRectZero
error:error];
}
@end
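Callers that still need ROI semantics after this commit can crop before constructing the `MPPImage`. A sketch assuming a UIKit-backed source image and an `initWithUIImage:error:` initializer on `MPPImage` (an assumption; verify against the current headers):

    // Emulate the removed regionOfInterest: parameter by pre-cropping.
    CGRect roi = CGRectMake(0, 0, 200, 200);
    CGImageRef croppedCGImage = CGImageCreateWithImageInRect(sourceImage.CGImage, roi);
    UIImage *croppedUIImage = [UIImage imageWithCGImage:croppedCGImage];
    CGImageRelease(croppedCGImage);

    NSError *error = nil;
    MPPImage *mpImage = [[MPPImage alloc] initWithUIImage:croppedUIImage error:&error];
    MPPObjectDetectionResult *result = [detector detectInImage:mpImage error:&error];

Note that bounding boxes in the result are then expressed in the cropped image's coordinate system, not the original frame's.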