From cf6b98537fe26d8b7e05a1c69682674d777b3aa9 Mon Sep 17 00:00:00 2001
From: aaaron7
Date: Fri, 6 Nov 2020 10:50:08 +0800
Subject: [PATCH] Use video source timestamp instead of system clock.

---
 .../examples/ios/common/CommonViewController.h  |  4 ++++
 .../examples/ios/common/CommonViewController.mm |  4 +++-
 mediapipe/objc/MPPGraph.mm                      | 17 ++++++++++++-----
 3 files changed, 19 insertions(+), 6 deletions(-)

diff --git a/mediapipe/examples/ios/common/CommonViewController.h b/mediapipe/examples/ios/common/CommonViewController.h
index b4650423b..a463a1033 100644
--- a/mediapipe/examples/ios/common/CommonViewController.h
+++ b/mediapipe/examples/ios/common/CommonViewController.h
@@ -18,6 +18,7 @@
 #import "mediapipe/objc/MPPGraph.h"
 #import "mediapipe/objc/MPPLayerRenderer.h"
 #import "mediapipe/objc/MPPPlayerInputSource.h"
+#import "mediapipe/objc/MPPTimestampConverter.h"
 
 typedef NS_ENUM(NSInteger, MediaPipeDemoSourceMode) {
   MediaPipeDemoSourceCamera,
@@ -60,4 +61,7 @@ typedef NS_ENUM(NSInteger, MediaPipeDemoSourceMode) {
 // Graph output stream.
 @property(nonatomic) const char* graphOutputStream;
 
+// Converts video time to MediaPipe's internal Timestamp.
+@property(nonatomic) MPPTimestampConverter* timestampConverter;
+
 @end
diff --git a/mediapipe/examples/ios/common/CommonViewController.mm b/mediapipe/examples/ios/common/CommonViewController.mm
index aa7eb5d57..89a31841e 100644
--- a/mediapipe/examples/ios/common/CommonViewController.mm
+++ b/mediapipe/examples/ios/common/CommonViewController.mm
@@ -76,6 +76,7 @@ static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
   self.renderer.layer.frame = self.liveView.layer.bounds;
   [self.liveView.layer addSublayer:self.renderer.layer];
   self.renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;
+  self.timestampConverter = [[MPPTimestampConverter alloc] init];
 
   dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
       DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
@@ -173,7 +174,8 @@ static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
 
   [self.mediapipeGraph sendPixelBuffer:imageBuffer
                             intoStream:self.graphInputStream
-                            packetType:MPPPacketTypePixelBuffer];
+                            packetType:MPPPacketTypePixelBuffer
+                             timestamp:[self.timestampConverter timestampForMediaTime:timestamp]];
 }
 
 #pragma mark - MPPGraphDelegate methods
diff --git a/mediapipe/objc/MPPGraph.mm b/mediapipe/objc/MPPGraph.mm
index 67f3a16e6..da2fb96bc 100644
--- a/mediapipe/objc/MPPGraph.mm
+++ b/mediapipe/objc/MPPGraph.mm
@@ -45,6 +45,8 @@
   /// Number of frames currently being processed by the graph.
   std::atomic<int32_t> _framesInFlight;
 
+  /// Used as a sequential timestamp for MediaPipe.
+  mediapipe::Timestamp _frameTimestamp;
   int64 _frameNumber;
 
   // Graph config modified to expose requested output streams.
@@ -367,16 +369,21 @@ void CallFrameDelegate(void* wrapperVoid, const std::string& streamName,
                  timestamp:timestamp
             allowOverwrite:NO];
 }
+
 - (BOOL)sendPixelBuffer:(CVPixelBufferRef)imageBuffer
              intoStream:(const std::string&)inputName
              packetType:(MPPPacketType)packetType {
-  uint64_t us = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::high_resolution_clock::
-                                               now().time_since_epoch()).count();
+  _GTMDevAssert(_frameTimestamp < mediapipe::Timestamp::Done(),
+                @"Trying to send frame after stream is done.");
+  if (_frameTimestamp < mediapipe::Timestamp::Min()) {
+    _frameTimestamp = mediapipe::Timestamp::Min();
+  } else {
+    _frameTimestamp++;
+  }
   return [self sendPixelBuffer:imageBuffer
                     intoStream:inputName
                     packetType:packetType
-                     timestamp:mediapipe::Timestamp(us)];
-
+                     timestamp:_frameTimestamp];
 }
 
 - (void)debugPrintGlInfo {
@@ -398,4 +405,4 @@ void CallFrameDelegate(void* wrapperVoid, const std::string& streamName,
     NSLog(@"%@", oneExtension);
 }
 
-@end
+@end
\ No newline at end of file
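---
For reference, timestampForMediaTime: is expected to map the CMTime delivered
by the camera or video source onto MediaPipe's microsecond-based Timestamp
while keeping graph input timestamps strictly increasing. Below is a minimal
sketch of such a converter. It is hypothetical (hence the DemoTimestampConverter
name): only the -timestampForMediaTime: signature and the alloc/init call are
taken from the patch above; the body is illustrative, not the actual
MPPTimestampConverter implementation.

  #import <CoreMedia/CoreMedia.h>
  #import <Foundation/Foundation.h>

  #include "mediapipe/framework/timestamp.h"

  @interface DemoTimestampConverter : NSObject
  - (mediapipe::Timestamp)timestampForMediaTime:(CMTime)mediaTime;
  @end

  @implementation DemoTimestampConverter {
    mediapipe::Timestamp _lastTimestamp;  // last timestamp handed to the graph
  }

  - (mediapipe::Timestamp)timestampForMediaTime:(CMTime)mediaTime {
    // Express the media time in microseconds, MediaPipe's timestamp unit.
    Float64 seconds = CMTimeGetSeconds(mediaTime);
    mediapipe::Timestamp converted(static_cast<int64_t>(
        seconds * mediapipe::Timestamp::kTimestampUnitsPerSecond));
    // Graph input timestamps must be strictly increasing, so nudge any
    // repeated or backwards media time just past the previous value,
    // mirroring the _frameTimestamp++ pattern used in MPPGraph.mm above.
    if (_lastTimestamp != mediapipe::Timestamp::Unset() &&
        converted <= _lastTimestamp) {
      converted = _lastTimestamp;
      converted++;
    }
    _lastTimestamp = converted;
    return converted;
  }
  @end

With a converter of this shape, the CommonViewController change reduces to
constructing it once in viewDidLoad and querying it per frame, exactly as the
second file of the patch does.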