diff --git a/mediapipe/examples/ios/objecttrackinggpu/BUILD b/mediapipe/examples/ios/objecttrackinggpu/BUILD
new file mode 100644
index 000000000..3244d4587
--- /dev/null
+++ b/mediapipe/examples/ios/objecttrackinggpu/BUILD
@@ -0,0 +1,70 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+ "@build_bazel_rules_apple//apple:ios.bzl",
+ "ios_application",
+)
+load(
+ "//mediapipe/examples/ios:bundle_id.bzl",
+ "BUNDLE_ID_PREFIX",
+ "example_provisioning",
+)
+
+licenses(["notice"])
+
+MIN_IOS_VERSION = "10.0"
+
+alias(
+    name = "objecttrackinggpu",
+ actual = "ObjectTrackingGpuApp",
+)
+
+ios_application(
+ name = "ObjectTrackingGpuApp",
+ app_icons = ["//mediapipe/examples/ios/common:AppIcon"],
+ bundle_id = BUNDLE_ID_PREFIX + ".ObjectTrackingGpu",
+ families = [
+ "iphone",
+ "ipad",
+ ],
+ infoplists = [
+ "//mediapipe/examples/ios/common:Info.plist",
+ "Info.plist",
+ ],
+ minimum_os_version = MIN_IOS_VERSION,
+ provisioning_profile = example_provisioning(),
+ deps = [
+ ":ObjectTrackingGpuAppLibrary",
+ "@ios_opencv//:OpencvFramework",
+ ],
+)
+
+objc_library(
+ name = "ObjectTrackingGpuAppLibrary",
+ data = [
+ "//mediapipe/graphs/tracking:mobile_gpu.binarypb",
+ "//mediapipe/models:ssdlite_object_detection.tflite",
+ "//mediapipe/models:ssdlite_object_detection_labelmap.txt",
+ ],
+ deps = [
+ "//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary",
+ ] + select({
+ "//mediapipe:ios_i386": [],
+ "//mediapipe:ios_x86_64": [],
+ "//conditions:default": [
+ "//mediapipe/graphs/tracking:mobile_calculators",
+ ],
+ }),
+)
diff --git a/mediapipe/examples/ios/objecttrackinggpu/Info.plist b/mediapipe/examples/ios/objecttrackinggpu/Info.plist
new file mode 100644
index 000000000..6b4790734
--- /dev/null
+++ b/mediapipe/examples/ios/objecttrackinggpu/Info.plist
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CameraPosition</key>
+	<string>front</string>
+	<key>GraphOutputStream</key>
+	<string>output_video</string>
+	<key>GraphInputStream</key>
+	<string>input_video</string>
+	<key>GraphName</key>
+	<string>mobile_gpu</string>
+</dict>
+</plist>
diff --git a/mediapipe/objc/MPPGraph.mm b/mediapipe/objc/MPPGraph.mm
index dec76047e..94b8b2667 100644
--- a/mediapipe/objc/MPPGraph.mm
+++ b/mediapipe/objc/MPPGraph.mm
@@ -46,7 +46,6 @@
/// Number of frames currently being processed by the graph.
 std::atomic<int32_t> _framesInFlight;
/// Used as a sequential timestamp for MediaPipe.
- mediapipe::Timestamp _frameTimestamp;
int64 _frameNumber;
// Graph config modified to expose requested output streams.
@@ -369,21 +368,16 @@ void CallFrameDelegate(void* wrapperVoid, const std::string& streamName,
timestamp:timestamp
allowOverwrite:NO];
}
-
- (BOOL)sendPixelBuffer:(CVPixelBufferRef)imageBuffer
intoStream:(const std::string&)inputName
packetType:(MPPPacketType)packetType {
- _GTMDevAssert(_frameTimestamp < mediapipe::Timestamp::Done(),
- @"Trying to send frame after stream is done.");
- if (_frameTimestamp < mediapipe::Timestamp::Min()) {
- _frameTimestamp = mediapipe::Timestamp::Min();
- } else {
- _frameTimestamp++;
- }
+  uint64_t us = std::chrono::duration_cast<std::chrono::microseconds>(
+      std::chrono::high_resolution_clock::now().time_since_epoch()).count();
return [self sendPixelBuffer:imageBuffer
intoStream:inputName
packetType:packetType
- timestamp:_frameTimestamp];
+ timestamp:mediapipe::Timestamp(us)];
+
}
- (void)debugPrintGlInfo {