diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h index d3f946bbe..f8cfcc916 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h @@ -80,7 +80,7 @@ NS_SWIFT_NAME(ObjectDetector) * Creates a new instance of `MPPObjectDetector` from the given `MPPObjectDetectorOptions`. * * @param options The options of type `MPPObjectDetectorOptions` to use for configuring the - * `MPPImageClassifMPPObjectDetectorier`. + * `MPPObjectDetector`. * @param error An optional error parameter populated when there is an error in initializing the * object detector. * @@ -96,7 +96,7 @@ NS_SWIFT_NAME(ObjectDetector) * `MPPImage`. Only use this method when the `MPPObjectDetector` is created with * `MPPRunningModeImage`. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of + * This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * must have one of the following pixel format types: * 1. kCVPixelFormatType_32BGRA @@ -123,7 +123,7 @@ NS_SWIFT_NAME(ObjectDetector) * the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with * `MPPRunningModeVideo`. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of + * This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * must have one of the following pixel format types: * 1. 
kCVPixelFormatType_32BGRA @@ -161,7 +161,7 @@ NS_SWIFT_NAME(ObjectDetector) * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the object detector. The input timestamps must be monotonically increasing. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of + * This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer * must have one of the following pixel format types: * 1. kCVPixelFormatType_32BGRA @@ -170,8 +170,8 @@ NS_SWIFT_NAME(ObjectDetector) * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color * space is RGB with an Alpha channel. * - * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you - * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its + * If this method is used for detecting objects in live camera frames using `AVFoundation`, ensure + * that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * `videoSettings` property. 
* * @param image A live stream image data of type `MPPImage` on which object detection is to be diff --git a/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.mm b/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.mm index b2f9cfc08..3a8a72f71 100644 --- a/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.mm +++ b/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.mm @@ -25,8 +25,12 @@ using ::mediapipe::Packet; + (nullable MPPObjectDetectorResult *)objectDetectorResultWithDetectionsPacket: (const Packet &)packet { + + NSInteger timestampInMilliseconds = (NSInteger)(packet.Timestamp().Value() / + kMicroSecondsPerMilliSecond); if (!packet.ValidateAsType<std::vector<Detection>>().ok()) { - return nil; + return [[MPPObjectDetectorResult alloc] initWithDetections:@[] + timestampInMilliseconds:timestampInMilliseconds]; } const std::vector<Detection> &detectionProtos = packet.Get<std::vector<Detection>>(); @@ -39,8 +43,7 @@ using ::mediapipe::Packet; return [[MPPObjectDetectorResult alloc] initWithDetections:detections - timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() / - kMicroSecondsPerMilliSecond)]; + timestampInMilliseconds:timestampInMilliseconds]; } @end