diff --git a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h index bef6bb9ee..00807c021 100644 --- a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h +++ b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.h @@ -16,6 +16,11 @@ NS_ASSUME_NONNULL_BEGIN +typedef NS_ENUM(NSInteger, MPPBaseOptionsDelegate) { + MPPBaseOptionsDelegateCPU = 0, + MPPBaseOptionsDelegateGPU = 1 +} NS_SWIFT_NAME(BaseOptionsDelegate); + /** * Holds the base options that is used for creation of any type of task. It has fields with * important information acceleration configuration, TFLite model source etc. @@ -26,6 +31,12 @@ NS_SWIFT_NAME(BaseOptions) /** The path to the model asset to open and mmap in memory. */ @property(nonatomic, copy) NSString *modelAssetPath; +/** + * The delegate to run MediaPipe. If the delegate is not set, the default + * delegate CPU is used. Use `delegate_options` to configure advanced + * features of the selected delegate. + */ +@property(nonatomic) MPPBaseOptionsDelegate delegate; @end NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m index a43119ad8..2c7b167ff 100644 --- a/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m +++ b/mediapipe/tasks/ios/core/sources/MPPBaseOptions.m @@ -28,7 +28,7 @@ MPPBaseOptions *baseOptions = [[MPPBaseOptions alloc] init]; baseOptions.modelAssetPath = self.modelAssetPath; - + baseOptions.delegate = self.delegate; return baseOptions; } diff --git a/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm b/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm index 73bcac49d..fe40132a6 100644 --- a/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm +++ b/mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.mm @@ -33,6 +33,16 @@ using BaseOptionsProto = ::mediapipe::tasks::core::proto::BaseOptions; if (self.modelAssetPath) { 
baseOptionsProto->mutable_model_asset()->set_file_name(self.modelAssetPath.UTF8String); } + switch (self.delegate) { + case MPPBaseOptionsDelegateCPU: { + baseOptionsProto->mutable_acceleration()->mutable_tflite(); + break; + } + case MPPBaseOptionsDelegateGPU: { + baseOptionsProto->mutable_acceleration()->mutable_gpu(); + break; + } + } } @end diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPImage.m b/mediapipe/tasks/ios/vision/core/sources/MPPImage.m index 1f5104ef7..5952ee7cc 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPImage.m +++ b/mediapipe/tasks/ios/vision/core/sources/MPPImage.m @@ -98,6 +98,8 @@ NS_ASSUME_NONNULL_BEGIN if (imageBuffer == NULL) { return nil; } self = [super init]; if (self != nil) { + CVPixelBufferRetain(imageBuffer); + _pixelBuffer = imageBuffer; diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.mm b/mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.mm index af419c6d0..6298492df 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.mm @@ -32,6 +32,10 @@ using ::mediapipe::Timestamp; @implementation MPPVisionPacketCreator + (Packet)createPacketWithMPPImage:(MPPImage *)image error:(NSError **)error { + if ((image.imageSourceType == MPPImageSourceTypePixelBuffer || image.imageSourceType == MPPImageSourceTypeSampleBuffer) && [image pixelBuffer] != nil) { + return MakePacket<Image>([image pixelBuffer]); + } + std::unique_ptr<ImageFrame> imageFrame = [image imageFrameWithError:error]; if (!imageFrame) { @@ -44,14 +48,9 @@ using ::mediapipe::Timestamp; + (Packet)createPacketWithMPPImage:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds error:(NSError **)error { - std::unique_ptr<ImageFrame> imageFrame = [image imageFrameWithError:error]; + auto packet = [self createPacketWithMPPImage:image error:error]; - if (!imageFrame) { - return Packet(); - } - - return MakePacket<Image>(std::move(imageFrame)) - 
.At(Timestamp(int64(timestampInMilliseconds * kMicroSecondsPerMilliSecond))); + return packet.At(Timestamp(int64(timestampInMilliseconds * kMicroSecondsPerMilliSecond))); } + (Packet)createPacketWithNormalizedRect:(NormalizedRect &)normalizedRect {