// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import "ViewController.h"

#import "mediapipe/objc/MPPCameraInputSource.h"
#import "mediapipe/objc/MPPGraph.h"
#import "mediapipe/objc/MPPLayerRenderer.h"

#include "mediapipe/framework/formats/landmark.pb.h"

static NSString* const kGraphName = @"iris_tracking_gpu";
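
// These stream names must match the input/output streams declared in the
// iris_tracking_gpu graph config.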
static const char* kInputStream = "input_video";
static const char* kOutputStream = "output_video";
static const char* kLandmarksOutputStream = "iris_landmarks";
static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";

@interface ViewController () <MPPGraphDelegate, MPPInputSourceDelegate>

// The MediaPipe graph currently in use. Initialized in viewDidLoad, started in viewWillAppear: and
// sent video frames on _videoQueue.
@property(nonatomic) MPPGraph* mediapipeGraph;

@end

@implementation ViewController {
  /// Handles camera access via AVCaptureSession library.
  MPPCameraInputSource* _cameraSource;
  /// Input side packet for the focal length parameter.
  std::map<std::string, mediapipe::Packet> _input_side_packets;
  mediapipe::Packet _focal_length_side_packet;

  /// Inform the user when camera is unavailable.
  IBOutlet UILabel* _noCameraLabel;
  /// Display the camera preview frames.
  IBOutlet UIView* _liveView;
  /// Render frames in a layer.
  MPPLayerRenderer* _renderer;

  /// Process camera frames on this queue.
  dispatch_queue_t _videoQueue;
}

#pragma mark - Cleanup methods

- (void)dealloc {
  self.mediapipeGraph.delegate = nil;
  [self.mediapipeGraph cancel];
  // Ignore errors since we're cleaning up.
  [self.mediapipeGraph closeAllInputStreamsWithError:nil];
  [self.mediapipeGraph waitUntilDoneWithError:nil];
}

#pragma mark - MediaPipe graph methods

+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
  // Load the graph config resource.
  NSError* configLoadError = nil;
  NSBundle* bundle = [NSBundle bundleForClass:[self class]];
  if (!resource || resource.length == 0) {
    return nil;
  }
  NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
  NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
  if (!data) {
    NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
    return nil;
  }

  // Parse the graph config resource into a mediapipe::CalculatorGraphConfig proto object.
  mediapipe::CalculatorGraphConfig config;
  config.ParseFromArray(data.bytes, data.length);

  // Create a MediaPipe graph with the mediapipe::CalculatorGraphConfig proto object.
  MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
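  // Request pixel buffers from the annotated video stream and raw proto packets from the
  // landmark stream; raw packets are delivered through mediapipeGraph:didOutputPacket:fromStream:
  // below.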
  [newGraph addFrameOutputStream:kOutputStream outputPacketType:MPPPacketTypePixelBuffer];
  [newGraph addFrameOutputStream:kLandmarksOutputStream outputPacketType:MPPPacketTypeRaw];
  return newGraph;
}

#pragma mark - UIViewController methods

- (void)viewDidLoad {
  [super viewDidLoad];

  _renderer = [[MPPLayerRenderer alloc] init];
  _renderer.layer.frame = _liveView.layer.bounds;
  [_liveView.layer addSublayer:_renderer.layer];
  _renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;
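
  // Process camera frames on a serial queue at user-interactive QoS to keep
  // capture-to-render latency low.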
  dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
      DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
  _videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute);

  _cameraSource = [[MPPCameraInputSource alloc] init];
  [_cameraSource setDelegate:self queue:_videoQueue];
  _cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
  _cameraSource.cameraPosition = AVCaptureDevicePositionFront;
  // The frame's native format is rotated with respect to the portrait orientation.
  _cameraSource.orientation = AVCaptureVideoOrientationPortrait;
  // When using the front camera, mirror the input for a more natural look.
  _cameraSource.videoMirrored = YES;

  self.mediapipeGraph = [[self class] loadGraphFromResource:kGraphName];
  self.mediapipeGraph.delegate = self;
  // Set maxFramesInFlight to a small value to avoid memory contention for real-time processing.
  self.mediapipeGraph.maxFramesInFlight = 2;
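
  // The focal length is wrapped in a std::unique_ptr so its value can still be updated from
  // processVideoFrame:timestamp:fromSource: after the side packet has been handed to the graph;
  // packet contents are otherwise immutable.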
  _focal_length_side_packet =
      mediapipe::MakePacket<std::unique_ptr<float>>(absl::make_unique<float>(0.0));
  _input_side_packets = {
      {"focal_length_pixel", _focal_length_side_packet},
  };
  [self.mediapipeGraph addSidePackets:_input_side_packets];
}

// In this application, there is only one ViewController which has no navigation to other view
// controllers, and there is only one View with live display showing the result of running the
// MediaPipe graph on the live video feed. If more view controllers are needed later, the graph
// setup/teardown and camera start/stop logic should be updated appropriately in response to the
// appearance/disappearance of this ViewController, as viewWillAppear: can be invoked multiple
// times depending on the application navigation flow in that case.
- (void)viewWillAppear:(BOOL)animated {
  [super viewWillAppear:animated];

  [_cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) {
    if (granted) {
      [self startGraphAndCamera];
      dispatch_async(dispatch_get_main_queue(), ^{
        _noCameraLabel.hidden = YES;
      });
    }
  }];
}

- (void)startGraphAndCamera {
  // Start running self.mediapipeGraph.
  NSError* error;
  if (![self.mediapipeGraph startWithError:&error]) {
    NSLog(@"Failed to start graph: %@", error);
  }

  // Start fetching frames from the camera.
  dispatch_async(_videoQueue, ^{
    [_cameraSource start];
  });
}

#pragma mark - MPPGraphDelegate methods

// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
    didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
              fromStream:(const std::string&)streamName {
  if (streamName == kOutputStream) {
    // Display the captured image on the screen.
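    // Retain the buffer before the async hop so it stays valid until the main queue renders it;
    // the matching release happens inside the block once rendering is done.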
    CVPixelBufferRetain(pixelBuffer);
    dispatch_async(dispatch_get_main_queue(), ^{
      [_renderer renderPixelBuffer:pixelBuffer];
      CVPixelBufferRelease(pixelBuffer);
    });
  }
}

// Receives a raw packet from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
       didOutputPacket:(const ::mediapipe::Packet&)packet
            fromStream:(const std::string&)streamName {
  if (streamName == kLandmarksOutputStream) {
    if (packet.IsEmpty()) {
      NSLog(@"[TS:%lld] No iris landmarks", packet.Timestamp().Value());
      return;
    }
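    // Landmark coordinates are normalized: x and y are scaled to [0, 1] by the image width and
    // height, and z uses roughly the same scale as x.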
    const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
    NSLog(@"[TS:%lld] Number of landmarks on iris: %d", packet.Timestamp().Value(),
          landmarks.landmark_size());
    for (int i = 0; i < landmarks.landmark_size(); ++i) {
      NSLog(@"\tLandmark[%d]: (%f, %f, %f)", i, landmarks.landmark(i).x(),
            landmarks.landmark(i).y(), landmarks.landmark(i).z());
    }
  }
}

#pragma mark - MPPInputSourceDelegate methods

// Must be invoked on _videoQueue.
- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
                timestamp:(CMTime)timestamp
               fromSource:(MPPInputSource*)source {
  if (source != _cameraSource) {
    NSLog(@"Unknown source: %@", source);
    return;
  }

  // TODO: This is a temporary solution; it still needs to be verified that the focal length
  // stays constant. If it does not, the focal length should be sent over an input stream
  // instead of a side packet.
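  // columns[0][0] of the camera intrinsic matrix is the focal length along the x axis, in pixels.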
  *(_input_side_packets["focal_length_pixel"].Get<std::unique_ptr<float>>()) =
      _cameraSource.cameraIntrinsicMatrix.columns[0][0];
  [self.mediapipeGraph sendPixelBuffer:imageBuffer
                            intoStream:kInputStream
                            packetType:MPPPacketTypePixelBuffer];
}

@end