Updated all references of timestampMs to timestampInMilliseconds

Prianka Liz Kariat 2023-04-13 17:56:09 +05:30
parent 3fdd7f3618
commit 2c154e20cc
25 changed files with 131 additions and 119 deletions

View File

@@ -90,7 +90,7 @@ NS_SWIFT_NAME(ClassificationResult)
  * amount of data to process might exceed the maximum size that the model can process: to solve
  * this, the input data is split into multiple chunks starting at different timestamps.
  */
-@property(nonatomic, readonly) NSInteger timestampMs;
+@property(nonatomic, readonly) NSInteger timestampInMilliseconds;
 /**
  * Initializes a new `MPPClassificationResult` with the given array of classifications and time
@@ -98,14 +98,15 @@ NS_SWIFT_NAME(ClassificationResult)
  *
  * @param classifications An Array of `MPPClassifications` objects containing the predicted
  * categories for each head of the model.
- * @param timestampMs The timestamp (in milliseconds) of the start of the chunk of data
+ * @param timestampInMilliseconds The timestamp (in milliseconds) of the start of the chunk of data
  * corresponding to these results.
  *
  * @return An instance of `MPPClassificationResult` initialized with the given array of
- * classifications and timestampMs.
+ * classifications and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications
-                            timestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+    NS_DESIGNATED_INITIALIZER;
 - (instancetype)init NS_UNAVAILABLE;
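For orientation, a minimal sketch of the renamed initializer in use. The import and the empty classifications array are illustrative assumptions, not part of this commit:

    #import "MPPClassificationResult.h"  // assumed import; header path not shown in this diff

    MPPClassificationResult *classificationResult =
        [[MPPClassificationResult alloc] initWithClassifications:@[]  // illustrative empty array
                                          timestampInMilliseconds:42];
    NSLog(@"timestamp: %ld ms", (long)classificationResult.timestampInMilliseconds);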

View File

@@ -38,11 +38,11 @@
 @implementation MPPClassificationResult
 - (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications
-                            timestampMs:(NSInteger)timestampMs {
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
     _classifications = classifications;
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }
   return self;

View File

@@ -33,7 +33,7 @@ NS_SWIFT_NAME(EmbeddingResult)
  * cases, the amount of data to process might exceed the maximum size that the model can process. To
  * solve this, the input data is split into multiple chunks starting at different timestamps.
  */
-@property(nonatomic, readonly) NSInteger timestampMs;
+@property(nonatomic, readonly) NSInteger timestampInMilliseconds;
 /**
  * Initializes a new `MPPEmbeddingResult` with the given array of embeddings and timestamp (in
@@ -41,14 +41,14 @@ NS_SWIFT_NAME(EmbeddingResult)
  *
  * @param embeddings An Array of `MPPEmbedding` objects containing the embedding results for each
  * head of the model.
- * @param timestampMs The optional timestamp (in milliseconds) of the start of the chunk of data
- * corresponding to these results. Pass `0` if timestamp is absent.
+ * @param timestampInMilliseconds The optional timestamp (in milliseconds) of the start of the chunk
+ * of data corresponding to these results. Pass `0` if timestamp is absent.
  *
  * @return An instance of `MPPEmbeddingResult` initialized with the given array of embeddings and
- * timestampMs.
+ * timestamp (in milliseconds).
  */
 - (instancetype)initWithEmbeddings:(NSArray<MPPEmbedding *> *)embeddings
-                       timestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_DESIGNATED_INITIALIZER;
 - (instancetype)init NS_UNAVAILABLE;
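A similar sketch for the embedding container; per the doc comment above, `0` stands in for an absent timestamp (the empty embeddings array is illustrative):

    MPPEmbeddingResult *embeddingResult =
        [[MPPEmbeddingResult alloc] initWithEmbeddings:@[]  // illustrative empty array
                               timestampInMilliseconds:0];  // 0 == timestamp absent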

View File

@@ -17,11 +17,11 @@
 @implementation MPPEmbeddingResult
 - (instancetype)initWithEmbeddings:(NSArray<MPPEmbedding *> *)embeddings
-                       timestampMs:(NSInteger)timestampMs {
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
     _embeddings = embeddings;
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }
   return self;

View File

@@ -55,13 +55,13 @@ using ClassificationResultProto =
     [classifications addObject:[MPPClassifications classificationsWithProto:classificationsProto]];
   }
-  NSInteger timestampMs = 0;
+  NSInteger timestampInMilliseconds = 0;
   if (classificationResultProto.has_timestamp_ms()) {
-    timestampMs = (NSInteger)classificationResultProto.timestamp_ms();
+    timestampInMilliseconds = (NSInteger)classificationResultProto.timestamp_ms();
   }
   return [[MPPClassificationResult alloc] initWithClassifications:classifications
-                                                       timestampMs:timestampMs];
+                                           timestampInMilliseconds:timestampInMilliseconds];
   ;
 }

View File

@@ -31,12 +31,13 @@ using EmbeddingResultProto = ::mediapipe::tasks::components::containers::proto::
     [embeddings addObject:[MPPEmbedding embeddingWithProto:embeddingProto]];
   }
-  NSInteger timestampMs = 0;
+  NSInteger timestampInMilliseconds = 0;
   if (embeddingResultProto.has_timestamp_ms()) {
-    timestampMs = (NSInteger)embeddingResultProto.timestamp_ms();
+    timestampInMilliseconds = (NSInteger)embeddingResultProto.timestamp_ms();
   }
-  return [[MPPEmbeddingResult alloc] initWithEmbeddings:embeddings timestampMs:timestampMs];
+  return [[MPPEmbeddingResult alloc] initWithEmbeddings:embeddings
+                                timestampInMilliseconds:timestampInMilliseconds];
 }
 @end

View File

@@ -26,11 +26,12 @@ NS_SWIFT_NAME(TaskResult)
 /**
  * Timestamp that is associated with the task result object.
  */
-@property(nonatomic, assign, readonly) NSInteger timestampMs;
+@property(nonatomic, assign, readonly) NSInteger timestampInMilliseconds;
 - (instancetype)init NS_UNAVAILABLE;
-- (instancetype)initWithTimestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds
+    NS_DESIGNATED_INITIALIZER;
 @end

View File

@@ -16,16 +16,16 @@
 @implementation MPPTaskResult
-- (instancetype)initWithTimestampMs:(NSInteger)timestampMs {
+- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }
   return self;
 }
 - (id)copyWithZone:(NSZone *)zone {
-  return [[MPPTaskResult alloc] initWithTimestampMs:self.timestampMs];
+  return [[MPPTaskResult alloc] initWithTimestampInMilliseconds:self.timestampInMilliseconds];
 }
 @end
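Because MPPTaskResult is the base class of every task result below, this rename is what forces the matching changes in each subclass's call to super. A minimal sketch (the value 100 is illustrative):

    MPPTaskResult *taskResult = [[MPPTaskResult alloc] initWithTimestampInMilliseconds:100];
    MPPTaskResult *taskResultCopy = [taskResult copy];  // copyWithZone: carries the timestamp over
    NSAssert(taskResultCopy.timestampInMilliseconds == 100, @"copy preserves the timestamp");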

View File

@@ -487,7 +487,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   NSError *liveStreamApiCallError;
   XCTAssertFalse([imageClassifier classifyAsyncImage:image
-                                          timestampMs:0
+                              timestampInMilliseconds:0
                                                 error:&liveStreamApiCallError]);
   NSError *expectedLiveStreamApiCallError =
@@ -501,7 +501,9 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
   NSError *videoApiCallError;
-  XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
+  XCTAssertFalse([imageClassifier classifyVideoFrame:image
+                              timestampInMilliseconds:0
+                                                error:&videoApiCallError]);
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -524,7 +526,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   NSError *liveStreamApiCallError;
   XCTAssertFalse([imageClassifier classifyAsyncImage:image
-                                          timestampMs:0
+                              timestampInMilliseconds:0
                                                 error:&liveStreamApiCallError]);
   NSError *expectedLiveStreamApiCallError =
@@ -575,7 +577,9 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
   NSError *videoApiCallError;
-  XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
+  XCTAssertFalse([imageClassifier classifyVideoFrame:image
+                              timestampInMilliseconds:0
+                                                error:&videoApiCallError]);
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -601,7 +605,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   for (int i = 0; i < 3; i++) {
     MPPImageClassifierResult *imageClassifierResult = [imageClassifier classifyVideoFrame:image
-                                                                               timestampMs:i
+                                                                   timestampInMilliseconds:i
                                                                                      error:nil];
     [self assertImageClassifierResult:imageClassifierResult
            hasExpectedCategoriesCount:maxResults
@@ -630,10 +634,10 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   MPPImage *image = [self imageWithFileInfo:kBurgerImage];
-  XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:1 error:nil]);
+  XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:1 error:nil]);
   NSError *error;
-  XCTAssertFalse([imageClassifier classifyAsyncImage:image timestampMs:0 error:&error]);
+  XCTAssertFalse([imageClassifier classifyAsyncImage:image timestampInMilliseconds:0 error:&error]);
   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -668,7 +672,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   MPPImage *image = [self imageWithFileInfo:kBurgerImage];
   for (int i = 0; i < 3; i++) {
-    XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:i error:nil]);
+    XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
 }

View File

@@ -31,13 +31,13 @@ NS_SWIFT_NAME(TextClassifierResult)
  *
  * @param classificationResult The `MPPClassificationResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPTextClassifierResult` initialized with the given
  * `MPPClassificationResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs;
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
 @end

View File

@@ -17,8 +17,8 @@
 @implementation MPPTextClassifierResult
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _classificationResult = classificationResult;
   }

View File

@@ -35,7 +35,7 @@ using ::mediapipe::Packet;
   return [[MPPTextClassifierResult alloc]
       initWithClassificationResult:classificationResult
-                       timestampMs:(NSInteger)(packet.Timestamp().Value() /
+           timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                                kMicroSecondsPerMilliSecond)];
 }
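The division above converts MediaPipe's packet clock, which ticks in microseconds, into the milliseconds the renamed APIs advertise. A sketch of just that conversion; the constant's value of 1000 is assumed from its name, since its definition is not part of this diff:

    // Packets are timestamped in microseconds; the iOS result objects expose milliseconds.
    static const NSInteger kMicroSecondsPerMilliSecond = 1000;  // assumed value
    NSInteger timestampInMilliseconds =
        (NSInteger)(packet.Timestamp().Value() / kMicroSecondsPerMilliSecond);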

View File

@@ -31,13 +31,13 @@ NS_SWIFT_NAME(TextEmbedderResult)
  *
  * @param embeddingResult The `MPPEmbeddingResult` instance containing one set of results
  * per embedder head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPTextEmbedderResult` initialized with the given
  * `MPPEmbeddingResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithEmbeddingResult:(MPPEmbeddingResult *)embeddingResult
-                            timestampMs:(NSInteger)timestampMs;
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
 - (instancetype)init NS_UNAVAILABLE;

View File

@@ -17,8 +17,8 @@
 @implementation MPPTextEmbedderResult
 - (instancetype)initWithEmbeddingResult:(MPPEmbeddingResult *)embeddingResult
-                            timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _embeddingResult = embeddingResult;
   }

View File

@@ -34,7 +34,7 @@ using ::mediapipe::Packet;
   return [[MPPTextEmbedderResult alloc]
       initWithEmbeddingResult:embeddingResult
-                  timestampMs:(NSInteger)(packet.Timestamp().Value() /
+      timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                           kMicroSecondsPerMilliSecond)];
 }

View File

@@ -122,17 +122,17 @@ NS_SWIFT_NAME(ImageClassifier)
  * `MPPRunningModeVideo`.
  *
  * @param image The `MPPImage` on which image classification is to be performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing image
  * classification on the input video frame.
  *
  * @return An `MPPImageClassifierResult` object that contains a list of image classifications.
  */
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                      error:(NSError **)error
-    NS_SWIFT_NAME(classify(videoFrame:timestampMs:));
+    NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:));
 /**
  * Performs image classification on the provided video frame of type `MPPImage` cropped to the
@@ -145,8 +145,8 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the video frame of type
  * `MPPImage`, on which image classification should be performed.
  * @param error An optional error parameter populated when there is an error in performing image
@@ -155,10 +155,10 @@ NS_SWIFT_NAME(ImageClassifier)
  * @return An `MPPImageClassifierResult` object that contains a list of image classifications.
  */
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                           regionOfInterest:(CGRect)roi
                                                      error:(NSError **)error
-    NS_SWIFT_NAME(classify(videoFrame:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:regionOfInterest:));
 /**
  * Sends live stream image data of type `MPPImage` to perform image classification using the whole
@@ -172,16 +172,17 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the image classifier. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the image classifier. The input timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing image
  * classification on the input live stream image data.
  *
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error NS_SWIFT_NAME(classifyAsync(image:timestampMs:));
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                     error:(NSError **)error
+    NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));
 /**
  * Sends live stream image data of type `MPPImage` to perform image classification, cropped to the
@@ -195,8 +196,8 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the image classifier. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the image classifier. The input timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given live stream image data
 * of type `MPPImage`, on which image classification should be performed.
  * @param error An optional error parameter populated when there is an error in performing image
@@ -205,10 +206,10 @@ NS_SWIFT_NAME(ImageClassifier)
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
           regionOfInterest:(CGRect)roi
                      error:(NSError **)error
-    NS_SWIFT_NAME(classifyAsync(image:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:regionOfInterest:));
 - (instancetype)init NS_UNAVAILABLE;
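A hedged sketch of the renamed video-mode call; creating the classifier and the `MPPImage` is outside this diff, so both are assumed here, and the ~33 ms per frame arithmetic is illustrative:

    // `imageClassifier` (video running mode) and `image` are assumed to exist.
    NSError *error = nil;
    NSInteger frameIndex = 5;
    MPPImageClassifierResult *result =
        [imageClassifier classifyVideoFrame:image
                    timestampInMilliseconds:frameIndex * 33  // ~30 fps; must increase monotonically
                                      error:&error];

From Swift, the NS_SWIFT_NAME annotations above expose the same calls as classify(videoFrame:timestampInMilliseconds:) and classifyAsync(image:timestampInMilliseconds:).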

View File

@@ -149,7 +149,7 @@ static NSString *const kTaskGraphName =
 }
 - (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                                           timestampMs:(NSInteger)timestampMs
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                       regionOfInterest:(CGRect)roi
                                                  error:(NSError **)error {
   std::optional<NormalizedRect> rect =
@@ -162,14 +162,15 @@ static NSString *const kTaskGraphName =
   }
   Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                            timestampMs:timestampMs
+                                                timestampInMilliseconds:timestampInMilliseconds
                                                                   error:error];
   if (imagePacket.IsEmpty()) {
     return std::nullopt;
   }
-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                                                           timestampMs:timestampMs];
+  Packet normalizedRectPacket =
+      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
+                                     timestampInMilliseconds:timestampInMilliseconds];
   PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
   return inputPacketMap;
@@ -180,11 +181,11 @@ static NSString *const kTaskGraphName =
 }
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                           regionOfInterest:(CGRect)roi
                                                      error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -204,20 +205,20 @@ static NSString *const kTaskGraphName =
 }
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                      error:(NSError **)error {
   return [self classifyVideoFrame:image
-                       timestampMs:timestampMs
+           timestampInMilliseconds:timestampInMilliseconds
                   regionOfInterest:CGRectZero
                              error:error];
 }
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
           regionOfInterest:(CGRect)roi
                      error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -228,10 +229,10 @@ static NSString *const kTaskGraphName =
 }
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
   return [self classifyAsyncImage:image
-                       timestampMs:timestampMs
+           timestampInMilliseconds:timestampInMilliseconds
                   regionOfInterest:CGRectZero
                              error:error];
 }

View File

@@ -31,13 +31,13 @@ NS_SWIFT_NAME(ImageClassifierResult)
  *
  * @param classificationResult The `MPPClassificationResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPImageClassifierResult` initialized with the given
  * `MPPClassificationResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs;
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
 @end

View File

@@ -17,8 +17,8 @@
 @implementation MPPImageClassifierResult
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _classificationResult = classificationResult;
   }

View File

@@ -34,7 +34,7 @@ using ::mediapipe::Packet;
   return [[MPPImageClassifierResult alloc]
       initWithClassificationResult:classificationResult
-                       timestampMs:(NSInteger)(packet.Timestamp().Value() /
+           timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                                kMicroSecondsPerMilliSecond)];
 }

View File

@@ -36,13 +36,13 @@ NS_SWIFT_NAME(ObjectDetectionResult)
  * @param detections An array of `MPPDetection` objects each of which has a bounding box that is
  * expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width)
  * x [0,image_height)`, which are the dimensions of the underlying image data.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPObjectDetectionResult` initialized with the given array of detections
  * and timestamp (in milliseconds).
  */
 - (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections
-                       timestampMs:(NSInteger)timestampMs;
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
 @end

View File

@@ -17,8 +17,8 @@
 @implementation MPPObjectDetectionResult
 - (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections
-                       timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _detections = detections;
   }

View File

@@ -138,8 +138,8 @@ NS_SWIFT_NAME(ObjectDetector)
  * `MPPRunningModeVideo`.
  *
  * @param image The `MPPImage` on which object detection is to be performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input image.
  *
@@ -149,9 +149,9 @@ NS_SWIFT_NAME(ObjectDetector)
  * image data.
  */
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                      error:(NSError **)error
-    NS_SWIFT_NAME(detect(videoFrame:timestampMs:));
+    NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
 /**
  * Performs object detection on the provided video frame of type `MPPImage` cropped to the
@@ -164,8 +164,8 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which
  * object detection should be performed.
  *
@@ -178,10 +178,10 @@ NS_SWIFT_NAME(ObjectDetector)
  * image data.
  */
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                           regionOfInterest:(CGRect)roi
                                                      error:(NSError **)error
-    NS_SWIFT_NAME(detect(videoFrame:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:regionOfInterest:));
 /**
  * Sends live stream image data of type `MPPImage` to perform object detection using the whole
@@ -195,16 +195,17 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the object detector. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the object detector. The input timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input live stream image data.
  *
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error NS_SWIFT_NAME(detectAsync(image:timestampMs:));
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                     error:(NSError **)error
+    NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
 /**
  * Sends live stream image data of type `MPPImage` to perform object detection, cropped to the
@@ -218,8 +219,8 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the object detector. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the object detector. The input timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given live stream image data
 * of type `MPPImage`, on which object detection should be performed.
  * @param error An optional error parameter populated when there is an error in performing object
@@ -228,10 +229,10 @@ NS_SWIFT_NAME(ObjectDetector)
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
           regionOfInterest:(CGRect)roi
                      error:(NSError **)error
-    NS_SWIFT_NAME(detectAsync(image:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:regionOfInterest:));
 - (instancetype)init NS_UNAVAILABLE;
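And the detector counterpart in live-stream mode, again as a hedged sketch with the detector and image assumed; the timestamp must exceed the one passed on the previous call:

    // `objectDetector` is assumed to be configured with the live-stream running mode.
    NSError *error = nil;
    BOOL sent = [objectDetector detectAsyncInImage:image
                           timestampInMilliseconds:16  // larger than the previous frame's timestamp
                                             error:&error];
    if (!sent) {
      NSLog(@"detectAsync failed: %@", error);
    }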

View File

@@ -157,7 +157,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }
 - (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                                           timestampMs:(NSInteger)timestampMs
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                       regionOfInterest:(CGRect)roi
                                                  error:(NSError **)error {
   std::optional<NormalizedRect> rect =
@@ -170,14 +170,15 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
   }
   Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                            timestampMs:timestampMs
+                                                timestampInMilliseconds:timestampInMilliseconds
                                                                   error:error];
   if (imagePacket.IsEmpty()) {
     return std::nullopt;
   }
-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                                                           timestampMs:timestampMs];
+  Packet normalizedRectPacket =
+      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
+                                     timestampInMilliseconds:timestampInMilliseconds];
   PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
   return inputPacketMap;
@@ -188,11 +189,11 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                           regionOfInterest:(CGRect)roi
                                                      error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -212,20 +213,20 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                      error:(NSError **)error {
   return [self detectInVideoFrame:image
-                       timestampMs:timestampMs
+           timestampInMilliseconds:timestampInMilliseconds
                   regionOfInterest:CGRectZero
                              error:error];
 }
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
           regionOfInterest:(CGRect)roi
                      error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -236,10 +237,10 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
   return [self detectAsyncInImage:image
-                       timestampMs:timestampMs
+           timestampInMilliseconds:timestampInMilliseconds
                   regionOfInterest:CGRectZero
                              error:error];
 }

View File

@@ -38,8 +38,9 @@ using ::mediapipe::Packet;
   }
   return [[MPPObjectDetectionResult alloc]
       initWithDetections:detections
-             timestampMs:(NSInteger)(packet.Timestamp().Value() / kMicroSecondsPerMilliSecond)];
+ timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
+                                     kMicroSecondsPerMilliSecond)];
 }
 @end