diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h b/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h
index cd464c6a1..bbc9aa8a5 100644
--- a/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h
+++ b/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h
@@ -90,7 +90,7 @@ NS_SWIFT_NAME(ClassificationResult)
  * amount of data to process might exceed the maximum size that the model can process: to solve
  * this, the input data is split into multiple chunks starting at different timestamps.
  */
-@property(nonatomic, readonly) NSInteger timestampMs;
+@property(nonatomic, readonly) NSInteger timestampInMilliseconds;

 /**
  * Initializes a new `MPPClassificationResult` with the given array of classifications and time
@@ -98,14 +98,15 @@ NS_SWIFT_NAME(ClassificationResult)
  *
  * @param classifications An Array of `MPPClassifications` objects containing the predicted
  * categories for each head of the model.
- * @param timestampMs The timestamp (in milliseconds) of the start of the chunk of data
+ * @param timestampInMilliseconds The timestamp (in milliseconds) of the start of the chunk of data
  * corresponding to these results.
  *
  * @return An instance of `MPPClassificationResult` initialized with the given array of
- * classifications and timestampMs.
+ * classifications and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications
-                            timestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+    NS_DESIGNATED_INITIALIZER;

 - (instancetype)init NS_UNAVAILABLE;

diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.m b/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.m
index 6d42d22ca..8d9440492 100644
--- a/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.m
+++ b/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.m
@@ -38,11 +38,11 @@
 @implementation MPPClassificationResult

 - (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications
-                            timestampMs:(NSInteger)timestampMs {
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
     _classifications = classifications;
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }

   return self;
diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPEmbeddingResult.h b/mediapipe/tasks/ios/components/containers/sources/MPPEmbeddingResult.h
index 8fd9b9dff..4cfd8890d 100644
--- a/mediapipe/tasks/ios/components/containers/sources/MPPEmbeddingResult.h
+++ b/mediapipe/tasks/ios/components/containers/sources/MPPEmbeddingResult.h
@@ -33,7 +33,7 @@ NS_SWIFT_NAME(EmbeddingResult)
  * cases, the amount of data to process might exceed the maximum size that the model can process. To
  * solve this, the input data is split into multiple chunks starting at different timestamps.
  */
-@property(nonatomic, readonly) NSInteger timestampMs;
+@property(nonatomic, readonly) NSInteger timestampInMilliseconds;

 /**
  * Initializes a new `MPPEmbeddingResult` with the given array of embeddings and timestamp (in
@@ -41,14 +41,14 @@ NS_SWIFT_NAME(EmbeddingResult)
  *
  * @param embeddings An Array of `MPPEmbedding` objects containing the embedding results for each
  * head of the model.
- * @param timestampMs The optional timestamp (in milliseconds) of the start of the chunk of data
- * corresponding to these results. Pass `0` if timestamp is absent.
+ * @param timestampInMilliseconds The optional timestamp (in milliseconds) of the start of the chunk
+ * of data corresponding to these results. Pass `0` if timestamp is absent.
  *
  * @return An instance of `MPPEmbeddingResult` initialized with the given array of embeddings and
- * timestampMs.
+ * timestamp (in milliseconds).
  */
 - (instancetype)initWithEmbeddings:(NSArray<MPPEmbedding *> *)embeddings
-                       timestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_DESIGNATED_INITIALIZER;

 - (instancetype)init NS_UNAVAILABLE;

diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPEmbeddingResult.m b/mediapipe/tasks/ios/components/containers/sources/MPPEmbeddingResult.m
index 56dd30fdd..1f4828583 100644
--- a/mediapipe/tasks/ios/components/containers/sources/MPPEmbeddingResult.m
+++ b/mediapipe/tasks/ios/components/containers/sources/MPPEmbeddingResult.m
@@ -17,11 +17,11 @@
 @implementation MPPEmbeddingResult

 - (instancetype)initWithEmbeddings:(NSArray<MPPEmbedding *> *)embeddings
-                       timestampMs:(NSInteger)timestampMs {
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
     _embeddings = embeddings;
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }

   return self;
diff --git a/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.mm b/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.mm
index b02b032bb..47f1cf45c 100644
--- a/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.mm
+++ b/mediapipe/tasks/ios/components/containers/utils/sources/MPPClassificationResult+Helpers.mm
@@ -55,13 +55,13 @@ using ClassificationResultProto =
     [classifications addObject:[MPPClassifications classificationsWithProto:classificationsProto]];
   }

-  NSInteger timestampMs = 0;
+  NSInteger timestampInMilliseconds = 0;
   if (classificationResultProto.has_timestamp_ms()) {
-    timestampMs = (NSInteger)classificationResultProto.timestamp_ms();
+    timestampInMilliseconds = (NSInteger)classificationResultProto.timestamp_ms();
   }

   return [[MPPClassificationResult alloc] initWithClassifications:classifications
-                                                       timestampMs:timestampMs];
+                                          timestampInMilliseconds:timestampInMilliseconds];
   ;
 }
diff --git a/mediapipe/tasks/ios/components/containers/utils/sources/MPPEmbeddingResult+Helpers.mm b/mediapipe/tasks/ios/components/containers/utils/sources/MPPEmbeddingResult+Helpers.mm
index f9863e9ca..cf5569c07 100644
--- a/mediapipe/tasks/ios/components/containers/utils/sources/MPPEmbeddingResult+Helpers.mm
+++ b/mediapipe/tasks/ios/components/containers/utils/sources/MPPEmbeddingResult+Helpers.mm
@@ -31,12 +31,13 @@ using EmbeddingResultProto = ::mediapipe::tasks::components::containers::proto::
     [embeddings addObject:[MPPEmbedding embeddingWithProto:embeddingProto]];
   }

-  NSInteger timestampMs = 0;
+  NSInteger timestampInMilliseconds = 0;
   if (embeddingResultProto.has_timestamp_ms()) {
-    timestampMs = (NSInteger)embeddingResultProto.timestamp_ms();
+    timestampInMilliseconds = (NSInteger)embeddingResultProto.timestamp_ms();
   }

-  return [[MPPEmbeddingResult alloc] initWithEmbeddings:embeddings timestampMs:timestampMs];
+  return [[MPPEmbeddingResult alloc] initWithEmbeddings:embeddings
+                               timestampInMilliseconds:timestampInMilliseconds];
 }

 @end
diff --git a/mediapipe/tasks/ios/core/sources/MPPTaskResult.h b/mediapipe/tasks/ios/core/sources/MPPTaskResult.h
index 4ee7b2fc6..664a94ba6 100644
--- a/mediapipe/tasks/ios/core/sources/MPPTaskResult.h
+++ b/mediapipe/tasks/ios/core/sources/MPPTaskResult.h
@@ -26,11 +26,12 @@ NS_SWIFT_NAME(TaskResult)
 /**
  * Timestamp that is associated with the task result object.
  */
-@property(nonatomic, assign, readonly) NSInteger timestampMs;
+@property(nonatomic, assign, readonly) NSInteger timestampInMilliseconds;

 - (instancetype)init NS_UNAVAILABLE;

-- (instancetype)initWithTimestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds
+    NS_DESIGNATED_INITIALIZER;

 @end
diff --git a/mediapipe/tasks/ios/core/sources/MPPTaskResult.m b/mediapipe/tasks/ios/core/sources/MPPTaskResult.m
index 6c08014ff..8a7fa6b5b 100644
--- a/mediapipe/tasks/ios/core/sources/MPPTaskResult.m
+++ b/mediapipe/tasks/ios/core/sources/MPPTaskResult.m
@@ -16,16 +16,16 @@
 @implementation MPPTaskResult

-- (instancetype)initWithTimestampMs:(NSInteger)timestampMs {
+- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }
   return self;
 }

 - (id)copyWithZone:(NSZone *)zone {
-  return [[MPPTaskResult alloc] initWithTimestampMs:self.timestampMs];
+  return [[MPPTaskResult alloc] initWithTimestampInMilliseconds:self.timestampInMilliseconds];
 }

 @end
diff --git a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m
index f7b26837e..694a17ca7 100644
--- a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m
+++ b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m
@@ -487,7 +487,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";

   NSError *liveStreamApiCallError;
   XCTAssertFalse([imageClassifier classifyAsyncImage:image
-                                         timestampMs:0
+                             timestampInMilliseconds:0
                                                error:&liveStreamApiCallError]);

   NSError *expectedLiveStreamApiCallError =
@@ -501,7 +501,9 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);

   NSError *videoApiCallError;
-  XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
+  XCTAssertFalse([imageClassifier classifyVideoFrame:image
+                             timestampInMilliseconds:0
+                                               error:&videoApiCallError]);

   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -524,7 +526,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";

   NSError *liveStreamApiCallError;
   XCTAssertFalse([imageClassifier classifyAsyncImage:image
-                                         timestampMs:0
+                             timestampInMilliseconds:0
                                                error:&liveStreamApiCallError]);

   NSError *expectedLiveStreamApiCallError =
@@ -575,7 +577,9 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   AssertEqualErrors(imageApiCallError, expectedImageApiCallError);

   NSError *videoApiCallError;
-  XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
+  XCTAssertFalse([imageClassifier classifyVideoFrame:image
+                             timestampInMilliseconds:0
+                                               error:&videoApiCallError]);

   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -601,7 +605,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";

   for (int i = 0; i < 3; i++) {
     MPPImageClassifierResult *imageClassifierResult =
         [imageClassifier classifyVideoFrame:image
-                                timestampMs:i
+                    timestampInMilliseconds:i
                                       error:nil];
     [self assertImageClassifierResult:imageClassifierResult
            hasExpectedCategoriesCount:maxResults
@@ -630,10 +634,10 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";

   MPPImage *image = [self imageWithFileInfo:kBurgerImage];

-  XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:1 error:nil]);
+  XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:1 error:nil]);

   NSError *error;
-  XCTAssertFalse([imageClassifier classifyAsyncImage:image timestampMs:0 error:&error]);
+  XCTAssertFalse([imageClassifier classifyAsyncImage:image timestampInMilliseconds:0 error:&error]);

   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -668,7 +672,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   MPPImage *image = [self imageWithFileInfo:kBurgerImage];

   for (int i = 0; i < 3; i++) {
-    XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:i error:nil]);
+    XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
 }
diff --git a/mediapipe/tasks/ios/text/text_classifier/sources/MPPTextClassifierResult.h b/mediapipe/tasks/ios/text/text_classifier/sources/MPPTextClassifierResult.h
index 6744a8e16..9ce7fcec2 100644
--- a/mediapipe/tasks/ios/text/text_classifier/sources/MPPTextClassifierResult.h
+++ b/mediapipe/tasks/ios/text/text_classifier/sources/MPPTextClassifierResult.h
@@ -31,13 +31,13 @@ NS_SWIFT_NAME(TextClassifierResult)
  *
  * @param classificationResult The `MPPClassificationResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPTextClassifierResult` initialized with the given
  * `MPPClassificationResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs;
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds;

 @end
diff --git a/mediapipe/tasks/ios/text/text_classifier/sources/MPPTextClassifierResult.m b/mediapipe/tasks/ios/text/text_classifier/sources/MPPTextClassifierResult.m
index 4d5c1104a..09a2097cc 100644
--- a/mediapipe/tasks/ios/text/text_classifier/sources/MPPTextClassifierResult.m
+++ b/mediapipe/tasks/ios/text/text_classifier/sources/MPPTextClassifierResult.m
@@ -17,8 +17,8 @@
 @implementation MPPTextClassifierResult

 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _classificationResult = classificationResult;
   }
diff --git a/mediapipe/tasks/ios/text/text_classifier/utils/sources/MPPTextClassifierResult+Helpers.mm b/mediapipe/tasks/ios/text/text_classifier/utils/sources/MPPTextClassifierResult+Helpers.mm
index f5d6aa1d3..5a924016e 100644
--- a/mediapipe/tasks/ios/text/text_classifier/utils/sources/MPPTextClassifierResult+Helpers.mm
+++ b/mediapipe/tasks/ios/text/text_classifier/utils/sources/MPPTextClassifierResult+Helpers.mm
@@ -35,7 +35,7 @@ using ::mediapipe::Packet;

   return [[MPPTextClassifierResult alloc]
       initWithClassificationResult:classificationResult
-                       timestampMs:(NSInteger)(packet.Timestamp().Value() /
+           timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                                kMicroSecondsPerMilliSecond)];
 }
diff --git a/mediapipe/tasks/ios/text/text_embedder/sources/MPPTextEmbedderResult.h b/mediapipe/tasks/ios/text/text_embedder/sources/MPPTextEmbedderResult.h
index e4697dcef..ab8edd16b 100644
--- a/mediapipe/tasks/ios/text/text_embedder/sources/MPPTextEmbedderResult.h
+++ b/mediapipe/tasks/ios/text/text_embedder/sources/MPPTextEmbedderResult.h
@@ -31,13 +31,13 @@ NS_SWIFT_NAME(TextEmbedderResult)
  *
  * @param embeddingResult The `MPPEmbeddingResult` instance containing one set of results
  * per embedder head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPTextEmbedderResult` initialized with the given
  * `MPPEmbeddingResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithEmbeddingResult:(MPPEmbeddingResult *)embeddingResult
-                            timestampMs:(NSInteger)timestampMs;
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds;

 - (instancetype)init NS_UNAVAILABLE;

diff --git a/mediapipe/tasks/ios/text/text_embedder/sources/MPPTextEmbedderResult.m b/mediapipe/tasks/ios/text/text_embedder/sources/MPPTextEmbedderResult.m
index 5483e3c3f..d764f63d6 100644
--- a/mediapipe/tasks/ios/text/text_embedder/sources/MPPTextEmbedderResult.m
+++ b/mediapipe/tasks/ios/text/text_embedder/sources/MPPTextEmbedderResult.m
@@ -17,8 +17,8 @@
 @implementation MPPTextEmbedderResult

 - (instancetype)initWithEmbeddingResult:(MPPEmbeddingResult *)embeddingResult
-                            timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _embeddingResult = embeddingResult;
   }
diff --git a/mediapipe/tasks/ios/text/text_embedder/utils/sources/MPPTextEmbedderResult+Helpers.mm b/mediapipe/tasks/ios/text/text_embedder/utils/sources/MPPTextEmbedderResult+Helpers.mm
index b769292ce..3534ea66d 100644
--- a/mediapipe/tasks/ios/text/text_embedder/utils/sources/MPPTextEmbedderResult+Helpers.mm
+++ b/mediapipe/tasks/ios/text/text_embedder/utils/sources/MPPTextEmbedderResult+Helpers.mm
@@ -34,7 +34,7 @@ using ::mediapipe::Packet;

   return [[MPPTextEmbedderResult alloc]
       initWithEmbeddingResult:embeddingResult
-                  timestampMs:(NSInteger)(packet.Timestamp().Value() /
+      timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                           kMicroSecondsPerMilliSecond)];
 }
diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h
index 581c8d95b..345687877 100644
--- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h
+++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h
@@ -122,17 +122,17 @@ NS_SWIFT_NAME(ImageClassifier)
  * `MPPRunningModeVideo`.
  *
  * @param image The `MPPImage` on which image classification is to be performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing image
  * classification on the input video frame.
  *
  * @return An `MPPImageClassifierResult` object that contains a list of image classifications.
  */
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error
-    NS_SWIFT_NAME(classify(videoFrame:timestampMs:));
+    NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:));

 /**
  * Performs image classification on the provided video frame of type `MPPImage` cropped to the
@@ -145,8 +145,8 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the video frame of type
  * `MPPImage`, on which image classification should be performed.
  * @param error An optional error parameter populated when there is an error in performing image
@@ -155,10 +155,10 @@ NS_SWIFT_NAME(ImageClassifier)
  * @return An `MPPImageClassifierResult` object that contains a list of image classifications.
  */
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          regionOfInterest:(CGRect)roi
                                                     error:(NSError **)error
-    NS_SWIFT_NAME(classify(videoFrame:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:regionOfInterest:));

 /**
  * Sends live stream image data of type `MPPImage` to perform image classification using the whole
@@ -172,16 +172,17 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the image classifier. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the image classifier. The input timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing image
  * classification on the input live stream image data.
  *
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error NS_SWIFT_NAME(classifyAsync(image:timestampMs:));
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                     error:(NSError **)error
+    NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));

 /**
  * Sends live stream image data of type `MPPImage` to perform image classification, cropped to the
@@ -195,8 +196,8 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the image classifier. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the image classifier. The input timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given live stream image data
  * of type `MPPImage`, on which image classification should be performed.
  * @param error An optional error parameter populated when there is an error in performing image
@@ -205,10 +206,10 @@ NS_SWIFT_NAME(ImageClassifier)
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-          regionOfInterest:(CGRect)roi
-                     error:(NSError **)error
-    NS_SWIFT_NAME(classifyAsync(image:timestampMs:regionOfInterest:));
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+          regionOfInterest:(CGRect)roi
+                     error:(NSError **)error
+    NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:regionOfInterest:));

 - (instancetype)init NS_UNAVAILABLE;

diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm
index 8051fbf3d..18c1bb56a 100644
--- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm
+++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm
@@ -149,7 +149,7 @@ static NSString *const kTaskGraphName =
 }

 - (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                                           timestampMs:(NSInteger)timestampMs
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                       regionOfInterest:(CGRect)roi
                                                  error:(NSError **)error {
   std::optional<NormalizedRect> rect =
@@ -162,14 +162,15 @@ static NSString *const kTaskGraphName =
   }

   Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                            timestampMs:timestampMs
+                                                timestampInMilliseconds:timestampInMilliseconds
                                                                   error:error];
   if (imagePacket.IsEmpty()) {
     return std::nullopt;
   }

-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                                                           timestampMs:timestampMs];
+  Packet normalizedRectPacket =
+      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
+                                     timestampInMilliseconds:timestampInMilliseconds];

   PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
   return inputPacketMap;
@@ -180,11 +181,11 @@ static NSString *const kTaskGraphName =
 }

 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          regionOfInterest:(CGRect)roi
                                                     error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                              regionOfInterest:roi
                                                                         error:error];
   if (!inputPacketMap.has_value()) {
@@ -204,20 +205,20 @@ static NSString *const kTaskGraphName =
 }

 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
   return [self classifyVideoFrame:image
-                      timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                  regionOfInterest:CGRectZero
                             error:error];
 }

 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-          regionOfInterest:(CGRect)roi
-                     error:(NSError **)error {
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+          regionOfInterest:(CGRect)roi
+                     error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                              regionOfInterest:roi
                                                                         error:error];
   if (!inputPacketMap.has_value()) {
@@ -228,10 +229,10 @@ static NSString *const kTaskGraphName =
 }

 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error {
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                     error:(NSError **)error {
   return [self classifyAsyncImage:image
-                      timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                  regionOfInterest:CGRectZero
                             error:error];
 }
diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.h
index 92fdb13cb..478bd452a 100644
--- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.h
+++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.h
@@ -31,13 +31,13 @@ NS_SWIFT_NAME(ImageClassifierResult)
  *
  * @param classificationResult The `MPPClassificationResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPImageClassifierResult` initialized with the given
  * `MPPClassificationResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs;
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds;

 @end
diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.m b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.m
index 6dcd064eb..cb17bb10e 100644
--- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.m
+++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.m
@@ -17,8 +17,8 @@
 @implementation MPPImageClassifierResult

 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _classificationResult = classificationResult;
   }
diff --git a/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.mm b/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.mm
index 09e21b278..f5199765d 100644
--- a/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.mm
+++ b/mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.mm
@@ -34,7 +34,7 @@ using ::mediapipe::Packet;

   return [[MPPImageClassifierResult alloc]
       initWithClassificationResult:classificationResult
-                       timestampMs:(NSInteger)(packet.Timestamp().Value() /
+           timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                                kMicroSecondsPerMilliSecond)];
 }
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectionResult.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectionResult.h
index 590867bf8..da9899d40 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectionResult.h
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectionResult.h
@@ -36,13 +36,13 @@ NS_SWIFT_NAME(ObjectDetectionResult)
  * @param detections An array of `MPPDetection` objects each of which has a bounding box that is
  * expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width)
  * x [0,image_height)`, which are the dimensions of the underlying image data.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPObjectDetectionResult` initialized with the given array of detections
  * and timestamp (in milliseconds).
  */
 - (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections
-                       timestampMs:(NSInteger)timestampMs;
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds;

 @end
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectionResult.m b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectionResult.m
index ac24c19fa..18174d073 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectionResult.m
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectionResult.m
@@ -17,8 +17,8 @@
 @implementation MPPObjectDetectionResult

 - (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections
-                       timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _detections = detections;
   }
diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
index 58344d0c7..f92c90c50 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h
@@ -138,8 +138,8 @@ NS_SWIFT_NAME(ObjectDetector)
  * `MPPRunningModeVideo`.
  *
  * @param image The `MPPImage` on which object detection is to be performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input image.
  *
@@ -149,9 +149,9 @@ NS_SWIFT_NAME(ObjectDetector)
  * image data.
  */
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error
-    NS_SWIFT_NAME(detect(videoFrame:timestampMs:));
+    NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));

 /**
  * Performs object detection on the provided video frame of type `MPPImage` cropped to the
@@ -164,8 +164,8 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which
  * object detection should be performed.
  *
@@ -178,10 +178,10 @@ NS_SWIFT_NAME(ObjectDetector)
  * image data.
  */
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          regionOfInterest:(CGRect)roi
                                                     error:(NSError **)error
-    NS_SWIFT_NAME(detect(videoFrame:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:regionOfInterest:));

 /**
  * Sends live stream image data of type `MPPImage` to perform object detection using the whole
@@ -195,16 +195,17 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the object detector. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the object detector. The input timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input live stream image data.
  *
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error NS_SWIFT_NAME(detectAsync(image:timestampMs:));
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                     error:(NSError **)error
+    NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));

 /**
  * Sends live stream image data of type `MPPImage` to perform object detection, cropped to the
@@ -218,8 +219,8 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the object detector. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the object detector. The input timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given live stream image data
  * of type `MPPImage`, on which object detection should be performed.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input live stream image data.
  *
@@ -228,10 +229,10 @@ NS_SWIFT_NAME(ObjectDetector)
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-          regionOfInterest:(CGRect)roi
-                     error:(NSError **)error
-    NS_SWIFT_NAME(detectAsync(image:timestampMs:regionOfInterest:));
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+          regionOfInterest:(CGRect)roi
+                     error:(NSError **)error
+    NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:regionOfInterest:));

 - (instancetype)init NS_UNAVAILABLE;

diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm
index 53dcad4a8..e1aa11e96 100644
--- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm
+++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm
@@ -157,7 +157,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }

 - (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                                           timestampMs:(NSInteger)timestampMs
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                       regionOfInterest:(CGRect)roi
                                                  error:(NSError **)error {
   std::optional<NormalizedRect> rect =
@@ -170,14 +170,15 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
   }

   Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                            timestampMs:timestampMs
+                                                timestampInMilliseconds:timestampInMilliseconds
                                                                   error:error];
   if (imagePacket.IsEmpty()) {
     return std::nullopt;
   }

-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                                                           timestampMs:timestampMs];
+  Packet normalizedRectPacket =
+      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
+                                     timestampInMilliseconds:timestampInMilliseconds];

   PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
   return inputPacketMap;
@@ -188,11 +189,11 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }

 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          regionOfInterest:(CGRect)roi
                                                     error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                              regionOfInterest:roi
                                                                         error:error];
   if (!inputPacketMap.has_value()) {
@@ -212,20 +213,20 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }

 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
   return [self detectInVideoFrame:image
-                      timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                  regionOfInterest:CGRectZero
                             error:error];
 }

 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-          regionOfInterest:(CGRect)roi
-                     error:(NSError **)error {
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+          regionOfInterest:(CGRect)roi
+                     error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                              regionOfInterest:roi
                                                                         error:error];
   if (!inputPacketMap.has_value()) {
@@ -236,10 +237,10 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }

 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error {
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                     error:(NSError **)error {
   return [self detectAsyncInImage:image
-                      timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                  regionOfInterest:CGRectZero
                             error:error];
 }
diff --git a/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectionResult+Helpers.mm b/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectionResult+Helpers.mm
index 3507b7d72..225a6993d 100644
--- a/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectionResult+Helpers.mm
+++ b/mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectionResult+Helpers.mm
@@ -38,8 +38,9 @@ using ::mediapipe::Packet;
   }

   return [[MPPObjectDetectionResult alloc]
-      initWithDetections:detections
-             timestampMs:(NSInteger)(packet.Timestamp().Value() / kMicroSecondsPerMilliSecond)];
+           initWithDetections:detections
+      timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
+                                          kMicroSecondsPerMilliSecond)];
 }

 @end
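Reviewer note: the sketches below exercise the renamed selectors end to end. This first one is a minimal, hypothetical call site for the video running mode; it assumes an `imageClassifier` (an `MPPImageClassifier` created in `MPPRunningModeVideo`) and an `MPPImage` named `image`, both set up outside this diff. From Swift, the same call is now `classify(videoFrame:timestampInMilliseconds:)` per the updated NS_SWIFT_NAME.

    // Hypothetical call site; `imageClassifier` and `image` are assumed to exist.
    NSError *error = nil;
    MPPImageClassifierResult *result = [imageClassifier classifyVideoFrame:image
                                                   timestampInMilliseconds:100
                                                                     error:&error];
    if (result) {
      // `timestampInMilliseconds` is the renamed `MPPTaskResult` property.
      NSLog(@"Classified frame at %ld ms", (long)result.timestampInMilliseconds);
    } else {
      NSLog(@"Classification failed: %@", error);
    }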
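The live-stream path keeps the same monotonically-increasing-timestamp contract; only the selector changes. A sketch, assuming an `objectDetector` (an `MPPObjectDetector` created in `MPPRunningModeLiveStream` with a result callback, not shown here) and the same `image`:

    // Timestamps must be monotonically increasing, per the header docs above.
    for (NSInteger i = 0; i < 3; i++) {
      NSError *error = nil;
      if (![objectDetector detectAsyncInImage:image
                      timestampInMilliseconds:i
                                        error:&error]) {
        NSLog(@"detectAsyncInImage failed: %@", error);
      }
    }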
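Finally, the renamed designated initializers compose the same way the `+Helpers` categories use them when converting from C++ packets; `classifications` here is a hypothetical, already-populated array:

    // Sketch only: builds a result whose timestamp is 0, i.e. absent.
    NSArray<MPPClassifications *> *classifications = @[];  // hypothetical contents
    MPPClassificationResult *classificationResult =
        [[MPPClassificationResult alloc] initWithClassifications:classifications
                                          timestampInMilliseconds:0];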