Updated all references of timestampMs to timestampInMilliseconds
parent 3fdd7f3618
commit 2c154e20cc
@@ -90,7 +90,7 @@ NS_SWIFT_NAME(ClassificationResult)
  * amount of data to process might exceed the maximum size that the model can process: to solve
  * this, the input data is split into multiple chunks starting at different timestamps.
  */
-@property(nonatomic, readonly) NSInteger timestampMs;
+@property(nonatomic, readonly) NSInteger timestampInMilliseconds;

 /**
  * Initializes a new `MPPClassificationResult` with the given array of classifications and time
@@ -98,14 +98,15 @@ NS_SWIFT_NAME(ClassificationResult)
  *
  * @param classifications An Array of `MPPClassifications` objects containing the predicted
  * categories for each head of the model.
- * @param timestampMs The timestamp (in milliseconds) of the start of the chunk of data
+ * @param timestampInMilliseconds The timestamp (in milliseconds) of the start of the chunk of data
  * corresponding to these results.
  *
  * @return An instance of `MPPClassificationResult` initialized with the given array of
- * classifications and timestampMs.
+ * classifications and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications
-                            timestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+    NS_DESIGNATED_INITIALIZER;

 - (instancetype)init NS_UNAVAILABLE;

@@ -38,11 +38,11 @@
 @implementation MPPClassificationResult

 - (instancetype)initWithClassifications:(NSArray<MPPClassifications *> *)classifications
-                            timestampMs:(NSInteger)timestampMs {
+                timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
     _classifications = classifications;
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }

   return self;
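The renamed designated initializer and property read, at a call site, roughly as below. This is a minimal sketch, assuming the `MPPClassificationResult` header above is imported; the empty classifications array and the literal timestamp are placeholders, not from this commit:

NSArray<MPPClassifications *> *classificationsPerHead = @[];  // placeholder: one entry per classifier head
MPPClassificationResult *classificationResult =
    [[MPPClassificationResult alloc] initWithClassifications:classificationsPerHead
                                     timestampInMilliseconds:500];
NSLog(@"Result timestamp: %ld ms", (long)classificationResult.timestampInMilliseconds);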
@@ -33,7 +33,7 @@ NS_SWIFT_NAME(EmbeddingResult)
  * cases, the amount of data to process might exceed the maximum size that the model can process. To
  * solve this, the input data is split into multiple chunks starting at different timestamps.
  */
-@property(nonatomic, readonly) NSInteger timestampMs;
+@property(nonatomic, readonly) NSInteger timestampInMilliseconds;

 /**
  * Initializes a new `MPPEmbedding` with the given array of embeddings and timestamp (in
@@ -41,14 +41,14 @@ NS_SWIFT_NAME(EmbeddingResult)
  *
  * @param embeddings An Array of `MPPEmbedding` objects containing the embedding results for each
  * head of the model.
- * @param timestampMs The optional timestamp (in milliseconds) of the start of the chunk of data
- * corresponding to these results. Pass `0` if timestamp is absent.
+ * @param timestampInMilliseconds The optional timestamp (in milliseconds) of the start of the chunk
+ * of data corresponding to these results. Pass `0` if timestamp is absent.
  *
  * @return An instance of `MPPEmbeddingResult` initialized with the given array of embeddings and
- * timestampMs.
+ * timestamp (in milliseconds).
  */
 - (instancetype)initWithEmbeddings:(NSArray<MPPEmbedding *> *)embeddings
-                       timestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+          timestampInMilliseconds:(NSInteger)timestampInMilliseconds NS_DESIGNATED_INITIALIZER;

 - (instancetype)init NS_UNAVAILABLE;

@@ -17,11 +17,11 @@
 @implementation MPPEmbeddingResult

 - (instancetype)initWithEmbeddings:(NSArray<MPPEmbedding *> *)embeddings
-                       timestampMs:(NSInteger)timestampMs {
+          timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
     _embeddings = embeddings;
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }

   return self;
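The embedding counterpart follows the same shape; a minimal sketch with placeholder values, using the "Pass `0` if timestamp is absent" convention from the doc comment above:

NSArray<MPPEmbedding *> *embeddingsPerHead = @[];  // placeholder: one entry per embedder head
MPPEmbeddingResult *embeddingResult =
    [[MPPEmbeddingResult alloc] initWithEmbeddings:embeddingsPerHead
                           timestampInMilliseconds:0];  // 0 when no timestamp applies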
@@ -55,13 +55,13 @@ using ClassificationResultProto =
     [classifications addObject:[MPPClassifications classificationsWithProto:classificationsProto]];
   }

-  NSInteger timestampMs = 0;
+  NSInteger timestampInMilliseconds = 0;
   if (classificationResultProto.has_timestamp_ms()) {
-    timestampMs = (NSInteger)classificationResultProto.timestamp_ms();
+    timestampInMilliseconds = (NSInteger)classificationResultProto.timestamp_ms();
   }

   return [[MPPClassificationResult alloc] initWithClassifications:classifications
-                                                       timestampMs:timestampMs];
+                                          timestampInMilliseconds:timestampInMilliseconds];
   ;
 }

@@ -31,12 +31,13 @@ using EmbeddingResultProto = ::mediapipe::tasks::components::containers::proto::
     [embeddings addObject:[MPPEmbedding embeddingWithProto:embeddingProto]];
   }

-  NSInteger timestampMs = 0;
+  NSInteger timestampInMilliseconds = 0;
   if (embeddingResultProto.has_timestamp_ms()) {
-    timestampMs = (NSInteger)embeddingResultProto.timestamp_ms();
+    timestampInMilliseconds = (NSInteger)embeddingResultProto.timestamp_ms();
   }

-  return [[MPPEmbeddingResult alloc] initWithEmbeddings:embeddings timestampMs:timestampMs];
+  return [[MPPEmbeddingResult alloc] initWithEmbeddings:embeddings
+                               timestampInMilliseconds:timestampInMilliseconds];
 }

 @end
@@ -26,11 +26,12 @@ NS_SWIFT_NAME(TaskResult)
 /**
  * Timestamp that is associated with the task result object.
  */
-@property(nonatomic, assign, readonly) NSInteger timestampMs;
+@property(nonatomic, assign, readonly) NSInteger timestampInMilliseconds;

 - (instancetype)init NS_UNAVAILABLE;

-- (instancetype)initWithTimestampMs:(NSInteger)timestampMs NS_DESIGNATED_INITIALIZER;
+- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds
+    NS_DESIGNATED_INITIALIZER;

 @end

@@ -16,16 +16,16 @@

 @implementation MPPTaskResult

-- (instancetype)initWithTimestampMs:(NSInteger)timestampMs {
+- (instancetype)initWithTimestampInMilliseconds:(NSInteger)timestampInMilliseconds {
   self = [super init];
   if (self) {
-    _timestampMs = timestampMs;
+    _timestampInMilliseconds = timestampInMilliseconds;
   }
   return self;
 }

 - (id)copyWithZone:(NSZone *)zone {
-  return [[MPPTaskResult alloc] initWithTimestampMs:self.timestampMs];
+  return [[MPPTaskResult alloc] initWithTimestampInMilliseconds:self.timestampInMilliseconds];
 }

 @end
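Because `initWithTimestampInMilliseconds:` is the designated initializer of the shared base class, every task-specific result forwards to it. A sketch of that pattern with a hypothetical `MPPFooResult` subclass (not part of this commit):

@interface MPPFooResult : MPPTaskResult
@property(nonatomic, readonly) NSInteger score;
@end

@implementation MPPFooResult

- (instancetype)initWithScore:(NSInteger)score
     timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
  // Forward to the renamed designated initializer of MPPTaskResult.
  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
  if (self) {
    _score = score;
  }
  return self;
}

@end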
@@ -487,7 +487,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";

   NSError *liveStreamApiCallError;
   XCTAssertFalse([imageClassifier classifyAsyncImage:image
-                                          timestampMs:0
+                              timestampInMilliseconds:0
                                                 error:&liveStreamApiCallError]);

   NSError *expectedLiveStreamApiCallError =
@@ -501,7 +501,9 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError);

   NSError *videoApiCallError;
-  XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
+  XCTAssertFalse([imageClassifier classifyVideoFrame:image
+                              timestampInMilliseconds:0
+                                                error:&videoApiCallError]);

   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -524,7 +526,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";

   NSError *liveStreamApiCallError;
   XCTAssertFalse([imageClassifier classifyAsyncImage:image
-                                          timestampMs:0
+                              timestampInMilliseconds:0
                                                 error:&liveStreamApiCallError]);

   NSError *expectedLiveStreamApiCallError =
@@ -575,7 +577,9 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   AssertEqualErrors(imageApiCallError, expectedImageApiCallError);

   NSError *videoApiCallError;
-  XCTAssertFalse([imageClassifier classifyVideoFrame:image timestampMs:0 error:&videoApiCallError]);
+  XCTAssertFalse([imageClassifier classifyVideoFrame:image
+                              timestampInMilliseconds:0
+                                                error:&videoApiCallError]);

   NSError *expectedVideoApiCallError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -601,7 +605,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";

   for (int i = 0; i < 3; i++) {
     MPPImageClassifierResult *imageClassifierResult = [imageClassifier classifyVideoFrame:image
-                                                                               timestampMs:i
+                                                                   timestampInMilliseconds:i
                                                                                      error:nil];
     [self assertImageClassifierResult:imageClassifierResult
            hasExpectedCategoriesCount:maxResults
@@ -630,10 +634,10 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";

   MPPImage *image = [self imageWithFileInfo:kBurgerImage];

-  XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:1 error:nil]);
+  XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:1 error:nil]);

   NSError *error;
-  XCTAssertFalse([imageClassifier classifyAsyncImage:image timestampMs:0 error:&error]);
+  XCTAssertFalse([imageClassifier classifyAsyncImage:image timestampInMilliseconds:0 error:&error]);

   NSError *expectedError =
       [NSError errorWithDomain:kExpectedErrorDomain
@@ -668,7 +672,7 @@ static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
   MPPImage *image = [self imageWithFileInfo:kBurgerImage];

   for (int i = 0; i < 3; i++) {
-    XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampMs:i error:nil]);
+    XCTAssertTrue([imageClassifier classifyAsyncImage:image timestampInMilliseconds:i error:nil]);
   }
 }
@@ -31,13 +31,13 @@ NS_SWIFT_NAME(TextClassifierResult)
  *
  * @param classificationResult The `MPPClassificationResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPTextClassifierResult` initialized with the given
  * `MPPClassificationResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                  timestampMs:(NSInteger)timestampMs;
+                      timestampInMilliseconds:(NSInteger)timestampInMilliseconds;

 @end

@@ -17,8 +17,8 @@
 @implementation MPPTextClassifierResult

 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                  timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                      timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _classificationResult = classificationResult;
   }

@@ -35,7 +35,7 @@ using ::mediapipe::Packet;

   return [[MPPTextClassifierResult alloc]
       initWithClassificationResult:classificationResult
-                       timestampMs:(NSInteger)(packet.Timestamp().Value() /
+           timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                                kMicroSecondsPerMilliSecond)];
 }
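The `+Helpers` categories derive the millisecond value from a MediaPipe packet timestamp, which is expressed in microseconds. A minimal sketch of that conversion inside an Objective-C++ helper, assuming a `packet` in scope and assuming `kMicroSecondsPerMilliSecond` is defined as 1000 in these files:

static constexpr int kMicroSecondsPerMilliSecond = 1000;  // assumed value

// A packet timestamp of 2'500'000 microseconds becomes 2500 milliseconds.
NSInteger timestampInMilliseconds =
    (NSInteger)(packet.Timestamp().Value() / kMicroSecondsPerMilliSecond);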
@@ -31,13 +31,13 @@ NS_SWIFT_NAME(TextEmbedderResult)
  *
  * @param embeddingResult The `MPPEmbeddingResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPTextEmbedderResult` initialized with the given
  * `MPPEmbeddingResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithEmbeddingResult:(MPPEmbeddingResult *)embeddingResult
-                             timestampMs:(NSInteger)timestampMs;
+                 timestampInMilliseconds:(NSInteger)timestampInMilliseconds;

 - (instancetype)init NS_UNAVAILABLE;

@@ -17,8 +17,8 @@
 @implementation MPPTextEmbedderResult

 - (instancetype)initWithEmbeddingResult:(MPPEmbeddingResult *)embeddingResult
-                             timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                 timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _embeddingResult = embeddingResult;
   }

@@ -34,7 +34,7 @@ using ::mediapipe::Packet;

   return [[MPPTextEmbedderResult alloc]
       initWithEmbeddingResult:embeddingResult
-                  timestampMs:(NSInteger)(packet.Timestamp().Value() /
+      timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                           kMicroSecondsPerMilliSecond)];
 }
@@ -122,17 +122,17 @@ NS_SWIFT_NAME(ImageClassifier)
  * `MPPRunningModeVideo`.
  *
  * @param image The `MPPImage` on which image classification is to be performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing image
  * classification on the input video frame.
  *
  * @return An `MPPImageClassifierResult` object that contains a list of image classifications.
  */
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                      error:(NSError **)error
-    NS_SWIFT_NAME(classify(videoFrame:timestampMs:));
+    NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:));

 /**
  * Performs image classification on the provided video frame of type `MPPImage` cropped to the
@@ -145,8 +145,8 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the video frame of type
  * `MPPImage`, on which image classification should be performed.
  * @param error An optional error parameter populated when there is an error in performing image
@@ -155,10 +155,10 @@ NS_SWIFT_NAME(ImageClassifier)
  * @return An `MPPImageClassifierResult` object that contains a list of image classifications.
  */
 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                           regionOfInterest:(CGRect)roi
                                                      error:(NSError **)error
-    NS_SWIFT_NAME(classify(videoFrame:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:regionOfInterest:));

 /**
  * Sends live stream image data of type `MPPImage` to perform image classification using the whole
@@ -172,16 +172,17 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the image classifier. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the image classifier. The input timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing image
  * classification on the input live stream image data.
  *
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error NS_SWIFT_NAME(classifyAsync(image:timestampMs:));
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                     error:(NSError **)error
+    NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:));

 /**
  * Sends live stream image data of type `MPPImage` to perform image classification, cropped to the
@@ -195,8 +196,8 @@ NS_SWIFT_NAME(ImageClassifier)
  *
  * @param image A live stream image data of type `MPPImage` on which image classification is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the image classifier. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the image classifier. The input timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given live stream image data
  * of type `MPPImage`, on which image classification should be performed.
  * @param error An optional error parameter populated when there is an error in performing image
@@ -205,10 +206,10 @@ NS_SWIFT_NAME(ImageClassifier)
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
           regionOfInterest:(CGRect)roi
                      error:(NSError **)error
-    NS_SWIFT_NAME(classifyAsync(image:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:regionOfInterest:));

 - (instancetype)init NS_UNAVAILABLE;
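With the renamed selectors, image-classifier call sites in video and live-stream running modes look roughly like this. A sketch only: `imageClassifier`, `image`, and `frameTimestampInMilliseconds` are assumed to exist in scope; in Swift the same calls surface as classify(videoFrame:timestampInMilliseconds:) and classifyAsync(image:timestampInMilliseconds:) via the NS_SWIFT_NAME mappings above:

NSError *videoError = nil;
MPPImageClassifierResult *videoResult =
    [imageClassifier classifyVideoFrame:image
                timestampInMilliseconds:frameTimestampInMilliseconds
                                  error:&videoError];

NSError *liveStreamError = nil;
BOOL sent = [imageClassifier classifyAsyncImage:image
                        timestampInMilliseconds:frameTimestampInMilliseconds
                                           error:&liveStreamError];
// In live-stream mode results arrive asynchronously; timestamps must be
// monotonically increasing across calls, as documented above.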
@@ -149,7 +149,7 @@ static NSString *const kTaskGraphName =
 }

 - (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                                            timestampMs:(NSInteger)timestampMs
+                                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                        regionOfInterest:(CGRect)roi
                                                   error:(NSError **)error {
   std::optional<NormalizedRect> rect =
@@ -162,14 +162,15 @@ static NSString *const kTaskGraphName =
   }

   Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                             timestampMs:timestampMs
+                                                timestampInMilliseconds:timestampInMilliseconds
                                                                    error:error];
   if (imagePacket.IsEmpty()) {
     return std::nullopt;
   }

-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                                                            timestampMs:timestampMs];
+  Packet normalizedRectPacket =
+      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
+                                     timestampInMilliseconds:timestampInMilliseconds];

   PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
   return inputPacketMap;
@@ -180,11 +181,11 @@ static NSString *const kTaskGraphName =
 }

 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                           regionOfInterest:(CGRect)roi
                                                      error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -204,20 +205,20 @@ static NSString *const kTaskGraphName =
 }

 - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                      error:(NSError **)error {
   return [self classifyVideoFrame:image
-                       timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                   regionOfInterest:CGRectZero
                              error:error];
 }

 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
           regionOfInterest:(CGRect)roi
                      error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -228,10 +229,10 @@ static NSString *const kTaskGraphName =
 }

 - (BOOL)classifyAsyncImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
   return [self classifyAsyncImage:image
-                       timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                   regionOfInterest:CGRectZero
                              error:error];
 }
@@ -31,13 +31,13 @@ NS_SWIFT_NAME(ImageClassifierResult)
  *
  * @param classificationResult The `MPPClassificationResult` instance containing one set of results
  * per classifier head.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPImageClassifierResult` initialized with the given
  * `MPPClassificationResult` and timestamp (in milliseconds).
  */
 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                  timestampMs:(NSInteger)timestampMs;
+                      timestampInMilliseconds:(NSInteger)timestampInMilliseconds;

 @end

@@ -17,8 +17,8 @@
 @implementation MPPImageClassifierResult

 - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult
-                                  timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+                      timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _classificationResult = classificationResult;
   }

@@ -34,7 +34,7 @@ using ::mediapipe::Packet;

   return [[MPPImageClassifierResult alloc]
       initWithClassificationResult:classificationResult
-                       timestampMs:(NSInteger)(packet.Timestamp().Value() /
+           timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
                                                kMicroSecondsPerMilliSecond)];
 }
@@ -36,13 +36,13 @@ NS_SWIFT_NAME(ObjectDetectionResult)
  * @param detections An array of `MPPDetection` objects each of which has a bounding box that is
  * expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width)
  * x [0,image_height)`, which are the dimensions of the underlying image data.
- * @param timestampMs The timestamp for this result.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
  *
  * @return An instance of `MPPObjectDetectionResult` initialized with the given array of detections
  * and timestamp (in milliseconds).
  */
 - (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections
-                       timestampMs:(NSInteger)timestampMs;
+          timestampInMilliseconds:(NSInteger)timestampInMilliseconds;

 @end

@@ -17,8 +17,8 @@
 @implementation MPPObjectDetectionResult

 - (instancetype)initWithDetections:(NSArray<MPPDetection *> *)detections
-                       timestampMs:(NSInteger)timestampMs {
-  self = [super initWithTimestampMs:timestampMs];
+          timestampInMilliseconds:(NSInteger)timestampInMilliseconds {
+  self = [super initWithTimestampInMilliseconds:timestampInMilliseconds];
   if (self) {
     _detections = detections;
   }
@@ -138,8 +138,8 @@ NS_SWIFT_NAME(ObjectDetector)
  * `MPPRunningModeVideo`.
  *
  * @param image The `MPPImage` on which object detection is to be performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input image.
  *
@@ -149,9 +149,9 @@ NS_SWIFT_NAME(ObjectDetector)
  * image data.
  */
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                      error:(NSError **)error
-    NS_SWIFT_NAME(detect(videoFrame:timestampMs:));
+    NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:));

 /**
  * Performs object detection on the provided video frame of type `MPPImage` cropped to the
@@ -164,8 +164,8 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The video frame's timestamp (in milliseconds). The input timestamps must be
- * monotonically increasing.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which
  * object detection should be performed.
  *
@@ -178,10 +178,10 @@ NS_SWIFT_NAME(ObjectDetector)
  * image data.
  */
 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                           regionOfInterest:(CGRect)roi
                                                      error:(NSError **)error
-    NS_SWIFT_NAME(detect(videoFrame:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:regionOfInterest:));

 /**
  * Sends live stream image data of type `MPPImage` to perform object detection using the whole
@@ -195,16 +195,17 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the object detector. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the object detector. The input timestamps must be monotonically increasing.
  * @param error An optional error parameter populated when there is an error in performing object
  * detection on the input live stream image data.
  *
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
-                     error:(NSError **)error NS_SWIFT_NAME(detectAsync(image:timestampMs:));
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                     error:(NSError **)error
+    NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:));

 /**
  * Sends live stream image data of type `MPPImage` to perform object detection, cropped to the
@@ -218,8 +219,8 @@ NS_SWIFT_NAME(ObjectDetector)
  *
  * @param image A live stream image data of type `MPPImage` on which object detection is to be
  * performed.
- * @param timestampMs The timestamp (in milliseconds) which indicates when the input image is sent
- * to the object detector. The input timestamps must be monotonically increasing.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the object detector. The input timestamps must be monotonically increasing.
  * @param roi A `CGRect` specifying the region of interest within the given live stream image data
  * of type `MPPImage`, on which object detection should be performed.
  * @param error An optional error parameter populated when there is an error in performing object
@@ -228,10 +229,10 @@ NS_SWIFT_NAME(ObjectDetector)
  * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
  */
 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
           regionOfInterest:(CGRect)roi
                      error:(NSError **)error
-    NS_SWIFT_NAME(detectAsync(image:timestampMs:regionOfInterest:));
+    NS_SWIFT_NAME(detectAsync(image:timestampInMilliseconds:regionOfInterest:));

 - (instancetype)init NS_UNAVAILABLE;
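The object-detector entry points follow the same pattern after the rename. A sketch with an assumed `objectDetector`, `image`, and `frameTimestampInMilliseconds`; the Swift projections become detect(videoFrame:timestampInMilliseconds:) and detectAsync(image:timestampInMilliseconds:):

NSError *error = nil;
MPPObjectDetectionResult *detectionResult =
    [objectDetector detectInVideoFrame:image
               timestampInMilliseconds:frameTimestampInMilliseconds
                                 error:&error];

BOOL queued = [objectDetector detectAsyncInImage:image
                          timestampInMilliseconds:frameTimestampInMilliseconds
                                            error:&error];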
@@ -157,7 +157,7 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }

 - (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                                            timestampMs:(NSInteger)timestampMs
+                                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                        regionOfInterest:(CGRect)roi
                                                   error:(NSError **)error {
   std::optional<NormalizedRect> rect =
@@ -170,14 +170,15 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
   }

   Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                             timestampMs:timestampMs
+                                                timestampInMilliseconds:timestampInMilliseconds
                                                                    error:error];
   if (imagePacket.IsEmpty()) {
     return std::nullopt;
   }

-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                                                            timestampMs:timestampMs];
+  Packet normalizedRectPacket =
+      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
+                                     timestampInMilliseconds:timestampInMilliseconds];

   PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
   return inputPacketMap;
@@ -188,11 +189,11 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }

 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                           regionOfInterest:(CGRect)roi
                                                      error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -212,20 +213,20 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }

 - (nullable MPPObjectDetectionResult *)detectInVideoFrame:(MPPImage *)image
-                                               timestampMs:(NSInteger)timestampMs
+                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                      error:(NSError **)error {
   return [self detectInVideoFrame:image
-                       timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                   regionOfInterest:CGRectZero
                              error:error];
 }

 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
           regionOfInterest:(CGRect)roi
                      error:(NSError **)error {
   std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                                 timestampMs:timestampMs
+                                                     timestampInMilliseconds:timestampInMilliseconds
                                                             regionOfInterest:roi
                                                                        error:error];
   if (!inputPacketMap.has_value()) {
@@ -236,10 +237,10 @@ static NSString *const kTaskGraphName = @"mediapipe.tasks.vision.ObjectDetectorG
 }

 - (BOOL)detectAsyncInImage:(MPPImage *)image
-               timestampMs:(NSInteger)timestampMs
+   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
   return [self detectAsyncInImage:image
-                       timestampMs:timestampMs
+          timestampInMilliseconds:timestampInMilliseconds
                   regionOfInterest:CGRectZero
                              error:error];
 }
@@ -39,7 +39,8 @@ using ::mediapipe::Packet;

   return [[MPPObjectDetectionResult alloc]
       initWithDetections:detections
-             timestampMs:(NSInteger)(packet.Timestamp().Value() / kMicroSecondsPerMilliSecond)];
+ timestampInMilliseconds:(NSInteger)(packet.Timestamp().Value() /
+                                     kMicroSecondsPerMilliSecond)];
 }

 @end