Merge pull request #4283 from priankakariatyml:ios-timestamp-renaming

PiperOrigin-RevId: 524420908

commit 8b3395edfb
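This change renames every `timestampMs` selector segment, parameter, and property in the MediaPipe Tasks iOS API to `timestampInMilliseconds`; the behaviour is unchanged. A minimal sketch of what the rename means at a call site (the `imageClassifier` and `image` values are assumed to exist already; only the selector spelling changes):

// Before this change:
//   [imageClassifier classifyVideoFrame:image timestampMs:100 error:&error];
// After this change:
NSError *error = nil;
MPPImageClassifierResult *result = [imageClassifier classifyVideoFrame:image
                                               timestampInMilliseconds:100
                                                                 error:&error];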
					
@@ -90,7 +90,7 @@ NS_SWIFT_NAME(ClassificationResult)
  * amount of data to process might exceed the maximum size that the model can process: to solve
  * this, the input data is split into multiple chunks starting at different timestamps.
  */
-@property(nonatomic, readonly) NSInteger timestampMs;
+@property(nonatomic, readonly) NSInteger timestampInMilliseconds;
 
 /**
  * Initializes a new `MPPClassificationResult` with the given array of classifications and time
@@ -98,14 +98,15 @@ NS_SWIFT_NAME(ClassificationResult)
  *
  * @param classifications An Array of `MPPClassifications` objects containing the predicted
  * categories for each head of the model.
- * @param timestampMs The timestamp (in milliseconds) of the start of the chunk of data
+ * @param timestampInMilliseconds The timestamp (in milliseconds) of the start of the chunk of data
  * corresponding to these results.
  *
  * @return An instance of `MPPClassificationResult` initialized with the given array of
- * classifications and timestampMs.
+ * classifications and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications
-                            timestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+    NS_DESIGNATED_INITIALIZER;
 
 - (instancetype)init NS_UNAVAILABLE;
 
@@ -38,11 +38,11 @@
 @implementation MPPClassificationResult
 
 - (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications
-                            timestampMs:(NSInteger)timestampMs {
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
     _classifications = classifications;
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }
 
   return self;
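For reference, constructing a classification result by hand now spells the designated initializer as follows (a sketch; the `classifications` array is assumed to have been built elsewhere, one entry per model head):

// Assumed to be populated elsewhere, one MPPClassifications per model head.
NSArray<MPPClassifications *> *classifications = @[];
MPPClassificationResult *classificationResult =
    [[MPPClassificationResult alloc] initWithClassifications:classifications
                                      timestampInMilliseconds:300];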
@@ -33,7 +33,7 @@ NS_SWIFT_NAME(EmbeddingResult)
  * cases, the amount of data to process might exceed the maximum size that the model can process. To
  * solve this, the input data is split into multiple chunks starting at different timestamps.
  */
-@property(nonatomic, readonly) NSInteger timestampMs;
+@property(nonatomic, readonly) NSInteger timestampInMilliseconds;
 
 /**
  * Initializes a new `MPPEmbedding` with the given array of embeddings and timestamp (in
@@ -41,14 +41,14 @@ NS_SWIFT_NAME(EmbeddingResult)
  *
  * @param embeddings An Array of `MPPEmbedding` objects containing the embedding results for each
  * head of the model.
- * @param timestampMs The optional timestamp (in milliseconds) of the start of the chunk of data
- * corresponding to these results. Pass `0` if timestamp is absent.
+ * @param timestampInMilliseconds The optional timestamp (in milliseconds) of the start of the chunk
+ * of data corresponding to these results. Pass `0` if timestamp is absent.
  *
  * @return An instance of `MPPEmbeddingResult` initialized with the given array of embeddings and
- * timestampMs.
+ * timestamp (in milliseconds).
  */
 - (instancetype)initWithEmbeddings:(NSArray<MPPEmbedding *> *)embeddings
-                       timestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_DESIGNATED_INITIALIZER;
 
 - (instancetype)init NS_UNAVAILABLE;
 
@@ -17,11 +17,11 @@
 @implementation MPPEmbeddingResult
 
 - (instancetype)initWithEmbeddings:(NSArray<MPPEmbedding *> *)embeddings
-                       timestampMs:(NSInteger)timestampMs {
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
     _embeddings = embeddings;
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }
 
   return self;
@@ -55,13 +55,13 @@ using ClassificationResultProto =
     [classifications addObject:[MPPClassifications classificationsWithProto:classificationsProto]];
   }
 
-  NSInteger timestampMs = 0;
+  NSInteger timestampInMilliseconds = 0;
   if (classificationResultProto.has_timestamp_ms()) {
-    timestampMs = (NSInteger)classificationResultProto.timestamp_ms();
+    timestampInMilliseconds = (NSInteger)classificationResultProto.timestamp_ms();
   }
 
   return [[MPPClassificationResult alloc] initWithClassifications:classifications
-                                                      timestampMs:timestampMs];
+                                          timestampInMilliseconds:timestampInMilliseconds];
   ;
 }
 
@@ -31,12 +31,13 @@ using EmbeddingResultProto = ::mediapipe::tasks::components::containers::proto::
     [embeddings addObject:[MPPEmbedding embeddingWithProto:embeddingProto]];
   }
 
-  NSInteger timestampMs = 0;
+  NSInteger timestampInMilliseconds = 0;
   if (embeddingResultProto.has_timestamp_ms()) {
-    timestampMs = (NSInteger)embeddingResultProto.timestamp_ms();
+    timestampInMilliseconds = (NSInteger)embeddingResultProto.timestamp_ms();
   }
 
-  return [[MPPEmbeddingResult alloc] initWithEmbeddings:embeddings timestampMs:timestampMs];
+  return [[MPPEmbeddingResult alloc] initWithEmbeddings:embeddings
+                                timestampInMilliseconds:timestampInMilliseconds];
 }
 
 @end
@@ -26,11 +26,12 @@ NS_SWIFT_NAME(TaskResult)
 /**
  * Timestamp that is associated with the task result object.
  */
-@property(nonatomic, assign, readonly) NSInteger timestampMs;
+@property(nonatomic, assign, readonly) NSInteger timestampInMilliseconds;
 
 - (instancetype)init NS_UNAVAILABLE;
 
-- (instancetype)initWithTimestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds
+    NS_DESIGNATED_INITIALIZER;
 
 @end
 
@@ -16,16 +16,16 @@
 
 @implementation MPPTaskResult
 
-- (instancetype)initWithTimestampMs:(NSInteger)timestampMs {
+- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }
   return self;
 }
 
 - (id)copyWithZone:(NSZone *)zone {
-  return [[MPPTaskResult alloc] initWithTimestampMs:self.timestampMs];
+  return [[MPPTaskResult alloc] initWithTimestampInMilliseconds:self.timestampInMilliseconds];
 }
 
 @end
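`MPPTaskResult` is the base class of every task-specific result, so each subclass initializer forwards its timestamp to the renamed designated initializer, as the text and image result classes further down do. A minimal sketch of that pattern with a hypothetical subclass (`MPPExampleResult` and its `payload` property are illustrative only, not part of the API):

@interface MPPExampleResult : MPPTaskResult
@property(nonatomic, readonly) NSArray *payload;
@end

@implementation MPPExampleResult

- (instancetype)initWithPayload:(NSArray *)payload
        timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
  // Forward the timestamp to the renamed MPPTaskResult designated initializer.
  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
  if (self) {
    _payload = payload;
  }
  return self;
}

@end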
@@ -487,7 +487,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
 
   NSError *liveStreamApiCallError;
   XCTAssertFalse([imageClassifier classifyAsyncImage:image
-                                         timestampMs:0
+                             timestampInMilliseconds:0
                                                error:&liveStreamApiCallError]);
 
   NSError *expectedLiveStreamApiCallError =
@@ -501,7 +501,9 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);
 
   NSError *videoApiCallError;
-  XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
+  XCTAssertFalse([imageClassifier classifyVideoFrame:image
+                             timestampInMilliseconds:0
+                                               error:&videoApiCallError]);
 
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -524,7 +526,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
 
   NSError *liveStreamApiCallError;
   XCTAssertFalse([imageClassifier classifyAsyncImage:image
-                                         timestampMs:0
+                             timestampInMilliseconds:0
                                                error:&liveStreamApiCallError]);
 
   NSError *expectedLiveStreamApiCallError =
@@ -575,7 +577,9 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   AssertEqualErrors(imageApiCallError, expectedImageApiCallError);
 
   NSError *videoApiCallError;
-  XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
+  XCTAssertFalse([imageClassifier classifyVideoFrame:image
+                             timestampInMilliseconds:0
+                                               error:&videoApiCallError]);
 
   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -601,7 +605,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
 
   for (int i = 0; i < 3; i++) {
     MPPImageClassifierResult *imageClassifierResult = [imageClassifier classifyVideoFrame:image
-                                                                              timestampMs:i
+                                                                  timestampInMilliseconds:i
                                                                                     error:nil];
     [self assertImageClassifierResult:imageClassifierResult
            hasExpectedCategoriesCount:maxResults
@@ -630,10 +634,10 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
 
   MPPImage *image = [self imageWithFileInfo:kBurgerImage];
 
-  XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:1 error:nil]);
+  XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:1 error:nil]);
 
   NSError *error;
-  XCTAssertFalse([imageClassifier classifyAsyncImage:image timestampMs:0 error:&error]);
+  XCTAssertFalse([imageClassifier classifyAsyncImage:image timestampInMilliseconds:0 error:&error]);
 
   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -668,7 +672,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   MPPImage *image = [self imageWithFileInfo:kBurgerImage];
 
   for (int i = 0; i < 3; i++) {
-    XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:i error:nil]);
+    XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
 }
 
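The tests above exercise the live-stream contract that input timestamps must be monotonically increasing: sending a timestamp of 0 after 1 is expected to fail. A hedged sketch of the corresponding guard on the caller's side (the `lastTimestampInMilliseconds` bookkeeping is an assumption for illustration, not part of the API):

// Only forward a frame whose timestamp advances past the previously sent one.
if (frameTimestampInMilliseconds > lastTimestampInMilliseconds) {
  [imageClassifier classifyAsyncImage:image
              timestampInMilliseconds:frameTimestampInMilliseconds
                                error:nil];
  lastTimestampInMilliseconds = frameTimestampInMilliseconds;
}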
@@ -31,13 +31,13 @@ NS_SWIFT_NAME(TextClassifierResult)
  *
  * @param classificationResult The `MPPClassificationResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPTextClassifierResult` initialized with the given
  * `MPPClassificationResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs;
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
 
 @end
 
@@ -17,8 +17,8 @@
 @implementation MPPTextClassifierResult
 
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                                 timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _classificationResult = classificationResult;
   }
@@ -35,7 +35,7 @@ using ::mediapipe::Packet;
 
   return [[MPPTextClassifierResult alloc]
       initWithClassificationResult:classificationResult
-                       timestampMs:(NSInteger)(packet.Timestamp().Value() /
+           timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                                kMicroSecondsPerMilliSecond)];
 }
 
@@ -31,13 +31,13 @@ NS_SWIFT_NAME(TextEmbedderResult)
  *
  * @param embeddingResult The `MPPEmbeddingResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in millisecondss) for this result.
  *
  * @return An instance of `MPPTextEmbedderResult` initialized with the given
  * `MPPEmbeddingResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithEmbeddingResult:(MPPEmbeddingResult *)embeddingResult
-                            timestampMs:(NSInteger)timestampMs;
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
 
 - (instancetype)init NS_UNAVAILABLE;
 
@@ -17,8 +17,8 @@
 @implementation MPPTextEmbedderResult
 
 - (instancetype)initWithEmbeddingResult:(MPPEmbeddingResult *)embeddingResult
-                            timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _embeddingResult = embeddingResult;
   }
@@ -34,7 +34,7 @@ using ::mediapipe::Packet;
 
   return [[MPPTextEmbedderResult alloc]
       initWithEmbeddingResult:embeddingResult
-                  timestampMs:(NSInteger)(packet.Timestamp().Value() /
+      timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                           kMicroSecondsPerMilliSecond)];
 }
 
@@ -41,7 +41,7 @@
  * timestamp.
  *
  * @param image The image to send to the MediaPipe graph.
- * @param timestampMs The timestamp (in milliseconds) to assign to the packet.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) to assign to the packet.
  * @param error Pointer to the memory location where errors if any should be saved. If @c NULL, no
  *    error will be saved.
  *
@@ -49,7 +49,7 @@
  *    occurred during the conversion.
  */
 + (mediapipe::Packet)createPacketWithMPPImage:(MPPImage *)image
-                                  timestampMs:(NSInteger)timestampMs
+                      timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                         error:(NSError **)error;
 
 /**
@@ -66,11 +66,11 @@
  * specified timestamp.
  *
  * @param image The `NormalizedRect` to send to the MediaPipe graph.
- * @param timestampMs The timestamp (in milliseconds) to assign to the packet.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) to assign to the packet.
  *
  * @return The MediaPipe packet containing the normalized rect.
  */
 + (mediapipe::Packet)createPacketWithNormalizedRect:(mediapipe::NormalizedRect &)normalizedRect
-                                        timestampMs:(NSInteger)timestampMs;
+                            timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
 
 @end
@@ -42,7 +42,7 @@ using ::mediapipe::Timestamp;
 }
 
 + (Packet)createPacketWithMPPImage:(MPPImage *)image
-                       timestampMs:(NSInteger)timestampMs
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                              error:(NSError **)error {
   std::unique_ptr<ImageFrame> imageFrame = [image imageFrameWithError:error];
 
@@ -51,7 +51,7 @@ using ::mediapipe::Timestamp;
   }
 
   return MakePacket<Image>(std::move(imageFrame))
-      .At(Timestamp(int64(timestampMs * kMicroSecondsPerMilliSecond)));
+      .At(Timestamp(int64(timestampInMilliseconds * kMicroSecondsPerMilliSecond)));
 }
 
 + (Packet)createPacketWithNormalizedRect:(NormalizedRect &)normalizedRect {
@@ -59,9 +59,9 @@ using ::mediapipe::Timestamp;
 }
 
 + (Packet)createPacketWithNormalizedRect:(NormalizedRect &)normalizedRect
-                             timestampMs:(NSInteger)timestampMs {
+                 timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   return MakePacket<NormalizedRect>(std::move(normalizedRect))
-      .At(Timestamp(int64(timestampMs * kMicroSecondsPerMilliSecond)));
+      .At(Timestamp(int64(timestampInMilliseconds * kMicroSecondsPerMilliSecond)));
 }
 
 @end
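The packet creator keeps the public API in milliseconds and converts to MediaPipe's microsecond `Timestamp` internally, as the `kMicroSecondsPerMilliSecond` multiplications above show. A small sketch of that arithmetic (the constant's value of 1000 is simply the milliseconds-to-microseconds factor; the variable names are illustrative):

static const NSInteger kMicroSecondsPerMilliSecond = 1000;

// A frame stamped at 250 ms becomes a MediaPipe timestamp of 250000 microseconds.
NSInteger timestampInMilliseconds = 250;
int64_t timestampInMicroseconds = timestampInMilliseconds * kMicroSecondsPerMilliSecond;  // 250000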
@@ -21,7 +21,7 @@
                        handedness:(NSArray<NSArray<MPPCategory *> *> *)handedness
                          gestures:(NSArray<NSArray<MPPCategory *> *> *)gestures
           timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
-  self = [super initWithTimestampMs:timestampInMilliseconds];
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _landmarks = landmarks;
     _worldLandmarks = worldLandmarks;
@@ -122,17 +122,17 @@ NS_SWIFT_NAME(ImageClassifier)
  * `MPPRunningModeVideo`.
  *
  * @param image The `MPPImage` on which image classification is to be performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing image
  * classification on the input video frame.
  *
  * @return  An `MPPImageClassifierResult` object that contains a list of image classifications.
  */
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error
-    NS_SWIFT_NAME(classify(videoFrame:timestampMs:));
+    NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:));
 
 /**
  * Performs image classification on the provided video frame of type `MPPImage` cropped to the
@@ -145,8 +145,8 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the video frame of type
  * `MPPImage`, on which image classification should be performed.
  * @param error An optional error parameter populated when there is an error in performing image
@@ -155,10 +155,10 @@ NS_SWIFT_NAME(ImageClassifier)
  * @return  An `MPPImageClassifierResult` object that contains a list of image classifications.
  */
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          regionOfInterest:(CGRect)roi
                                                     error:(NSError **)error
-    NS_SWIFT_NAME(classify(videoFrame:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:regionOfInterest:));
 
 /**
  * Sends live stream image data of type `MPPImage` to perform image classification using the whole
@@ -172,16 +172,17 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the image classifier. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the image classifier. The input timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing image
  * classification on the input live stream image data.
  *
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error NS_SWIFT_NAME(classifyAsync(image:timestampMs:));
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                      error:(NSError **)error
+    NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));
 
 /**
  * Sends live stream image data of type `MPPImage` to perform image classification, cropped to the
@@ -195,8 +196,8 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the image classifier. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the image classifier. The input timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given live stream image data
  * of type `MPPImage`, on which image classification should be performed.
  * @param error An optional error parameter populated when there is an error in performing image
@@ -205,10 +206,10 @@ NS_SWIFT_NAME(ImageClassifier)
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
            regionOfInterest:(CGRect)roi
                       error:(NSError **)error
-    NS_SWIFT_NAME(classifyAsync(image:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:regionOfInterest:));
 
 - (instancetype)init NS_UNAVAILABLE;
 
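Because the `NS_SWIFT_NAME` annotations above are renamed as well, Swift callers see the new argument label (`classify(videoFrame:timestampInMilliseconds:)`, `classifyAsync(image:timestampInMilliseconds:)`, and so on). In Objective-C the video-frame entry point is now called like this (a sketch; `imageClassifier` is assumed to be configured for `MPPRunningModeVideo` and `videoFrame` to be an `MPPImage`):

NSError *classifyError = nil;
NSInteger frameTimestampInMilliseconds = 33;  // e.g. the frame's presentation time in ms
MPPImageClassifierResult *videoResult =
    [imageClassifier classifyVideoFrame:videoFrame
                timestampInMilliseconds:frameTimestampInMilliseconds
                                  error:&classifyError];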
@@ -149,7 +149,7 @@ static NSString *const kTaskGraphName =
 }
 
 - (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                                           timestampMs:(NSInteger)timestampMs
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                       regionOfInterest:(CGRect)roi
                                                  error:(NSError **)error {
   std::optional<NormalizedRect> rect =
@@ -162,14 +162,15 @@ static NSString *const kTaskGraphName =
   }
 
   Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                            timestampMs:timestampMs
+                                                timestampInMilliseconds:timestampInMilliseconds
                                                                   error:error];
   if (imagePacket.IsEmpty()) {
     return std::nullopt;
   }
 
-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                                                           timestampMs:timestampMs];
+  Packet normalizedRectPacket =
+      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
+                                     timestampInMilliseconds:timestampInMilliseconds];
 
   PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
   return inputPacketMap;
@@ -180,11 +181,11 @@ static NSString *const kTaskGraphName =
 }
 
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          regionOfInterest:(CGRect)roi
                                                     error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -204,20 +205,20 @@ static NSString *const kTaskGraphName =
 }
 
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
   return [self classifyVideoFrame:image
-                      timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                  regionOfInterest:CGRectZero
                             error:error];
 }
 
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
            regionOfInterest:(CGRect)roi
                       error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -228,10 +229,10 @@ static NSString *const kTaskGraphName =
 }
 
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
   return [self classifyAsyncImage:image
-                      timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                  regionOfInterest:CGRectZero
                             error:error];
 }
@@ -31,13 +31,13 @@ NS_SWIFT_NAME(ImageClassifierResult)
  *
  * @param classificationResult The `MPPClassificationResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPImageClassifierResult` initialized with the given
  * `MPPClassificationResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs;
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
 
 @end
 
@@ -17,8 +17,8 @@
 @implementation MPPImageClassifierResult
 
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                 timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                     timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _classificationResult = classificationResult;
   }
@@ -34,7 +34,7 @@ using ::mediapipe::Packet;
 
   return [[MPPImageClassifierResult alloc]
       initWithClassificationResult:classificationResult
-                       timestampMs:(NSInteger)(packet.Timestamp().Value() /
+           timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                                kMicroSecondsPerMilliSecond)];
 }
 
					@ -36,13 +36,13 @@ NS_SWIFT_NAME(ObjectDetectionResult)
 | 
				
			||||||
 * @param detections An array of `MPPDetection` objects each of which has a bounding box that is
 | 
					 * @param detections An array of `MPPDetection` objects each of which has a bounding box that is
 | 
				
			||||||
 * expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width)
 | 
					 * expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width)
 | 
				
			||||||
 * x [0,image_height)`, which are the dimensions of the underlying image data.
 | 
					 * x [0,image_height)`, which are the dimensions of the underlying image data.
 | 
				
			||||||
 * @param timestampMs The timestamp for this result.
 | 
					 * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
 | 
				
			||||||
 *
 | 
					 *
 | 
				
			||||||
 * @return An instance of `MPPObjectDetectionResult` initialized with the given array of detections
 | 
					 * @return An instance of `MPPObjectDetectionResult` initialized with the given array of detections
 | 
				
			||||||
 * and timestamp (in milliseconds).
 | 
					 * and timestamp (in milliseconds).
 | 
				
			||||||
 */
 | 
					 */
 | 
				
			||||||
- (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections
 | 
					- (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections
 | 
				
			||||||
                       timestampMs:(NSInteger)timestampMs;
 | 
					           timestampInMilliseconds:(NSInteger)timestampInMilliseconds;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@end
 | 
					@end
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
@@ -17,8 +17,8 @@
 @implementation MPPObjectDetectionResult
 
 - (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections
-                       timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+           timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _detections = detections;
   }

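A minimal usage sketch of the renamed initializer, for example when constructing an expected result in a test; the empty detections array and the 100 ms timestamp are placeholder values:

    MPPObjectDetectionResult *expectedResult =
        [[MPPObjectDetectionResult alloc] initWithDetections:@[]
                                     timestampInMilliseconds:100];
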
@@ -138,8 +138,8 @@ NS_SWIFT_NAME(ObjectDetector)
  * `MPPRunningModeVideo`.
  *
  * @param image The `MPPImage` on which object detection is to be performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input image.
  *

@@ -149,9 +149,9 @@ NS_SWIFT_NAME(ObjectDetector)
  * image data.
  */
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error
-    NS_SWIFT_NAME(detect(videoFrame:timestampMs:));
+    NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));
 
 /**
  * Performs object detection on the provided video frame of type `MPPImage` cropped to the

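A hedged usage sketch of the renamed video-mode call; `objectDetector`, `image`, and `frameTimestampInMilliseconds` are placeholder names for an already configured `MPPObjectDetector`, the current video frame, and its timestamp. From Swift the same method is now `detect(videoFrame:timestampInMilliseconds:)`, per the `NS_SWIFT_NAME` above.

    NSError *error = nil;
    MPPObjectDetectionResult *result =
        [objectDetector detectInVideoFrame:image
                   timestampInMilliseconds:frameTimestampInMilliseconds
                                     error:&error];
    if (!result) {
      NSLog(@"Object detection failed: %@", error);
    }
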
@@ -164,8 +164,8 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which
  * object detection should be performed.
  *

@@ -178,10 +178,10 @@ NS_SWIFT_NAME(ObjectDetector)
  * image data.
  */
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          regionOfInterest:(CGRect)roi
                                                     error:(NSError **)error
-    NS_SWIFT_NAME(detect(videoFrame:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:regionOfInterest:));
 
 /**
  * Sends live stream image data of type `MPPImage` to perform object detection using the whole

@@ -195,16 +195,17 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the object detector. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the object detector. The input timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input live stream image data.
  *
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error NS_SWIFT_NAME(detectAsync(image:timestampMs:));
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                      error:(NSError **)error
+    NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));
 
 /**
  * Sends live stream image data of type `MPPImage` to perform object detection, cropped to the

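A hedged sketch of the renamed live-stream call; `objectDetector`, `image`, and `frameIndex` are placeholders, and the timestamp is derived from a frame counter purely to keep it monotonically increasing, as the documentation above requires. In Swift the call becomes `detectAsync(image:timestampInMilliseconds:)`.

    // Assumes roughly 30 fps input; any strictly increasing millisecond value works.
    NSInteger timestampInMilliseconds = frameIndex * 33;

    NSError *error = nil;
    if (![objectDetector detectAsyncInImage:image
                    timestampInMilliseconds:timestampInMilliseconds
                                      error:&error]) {
      NSLog(@"Failed to send the frame to the object detector: %@", error);
    }
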
@@ -218,8 +219,8 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the object detector. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the object detector. The input timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given live stream image data
  * of type `MPPImage`, on which iobject detection should be performed.
  * @param error An optional error parameter populated when there is an error in performing object

@@ -228,10 +229,10 @@ NS_SWIFT_NAME(ObjectDetector)
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
            regionOfInterest:(CGRect)roi
                       error:(NSError **)error
-    NS_SWIFT_NAME(detectAsync(image:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:regionOfInterest:));
 
 - (instancetype)init NS_UNAVAILABLE;
 

@@ -157,7 +157,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }
 
 - (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                                           timestampMs:(NSInteger)timestampMs
+                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                       regionOfInterest:(CGRect)roi
                                                  error:(NSError **)error {
   std::optional<NormalizedRect> rect =

@@ -170,14 +170,15 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
   }
 
   Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                            timestampMs:timestampMs
+                                                timestampInMilliseconds:timestampInMilliseconds
                                                                   error:error];
   if (imagePacket.IsEmpty()) {
     return std::nullopt;
   }
 
-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                                                           timestampMs:timestampMs];
+  Packet normalizedRectPacket =
+      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
+                                     timestampInMilliseconds:timestampInMilliseconds];
 
   PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
   return inputPacketMap;

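For context on the packet creation above: the image packet and the `NormalizedRect` packet are stamped with the same millisecond timestamp so the graph can associate them with the same frame, while MediaPipe packet timestamps themselves are in microseconds. A rough sketch of that underlying idea only, not the actual `MPPVisionPacketCreator` implementation, assuming `kMicroSecondsPerMilliSecond` is 1000:

    // Scale the millisecond timestamp up to the microsecond units packets use.
    mediapipe::Timestamp packetTimestamp(
        (int64_t)timestampInMilliseconds * kMicroSecondsPerMilliSecond);

    // Stamp both packets identically so downstream calculators treat them as one frame.
    mediapipe::Packet rectPacket =
        mediapipe::MakePacket<mediapipe::NormalizedRect>(rect.value()).At(packetTimestamp);
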
@@ -188,11 +189,11 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }
 
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                          regionOfInterest:(CGRect)roi
                                                     error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {

@@ -212,20 +213,20 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }
 
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                              timestampMs:(NSInteger)timestampMs
+                                  timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
   return [self detectInVideoFrame:image
-                      timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                  regionOfInterest:CGRectZero
                             error:error];
 }
 
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
            regionOfInterest:(CGRect)roi
                       error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {

@@ -236,10 +237,10 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }
 
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                       error:(NSError **)error {
   return [self detectAsyncInImage:image
-                      timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                  regionOfInterest:CGRectZero
                             error:error];
 }

@@ -39,7 +39,8 @@ using ::mediapipe::Packet;
 
   return [[MPPObjectDetectionResult alloc]
            initWithDetections:detections
-             timestampMs:(NSInteger)(packet.Timestamp().Value() / kMicroSecondsPerMilliSecond)];
+      timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
+                                          kMicroSecondsPerMilliSecond)];
 }
 
 @end