// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#import "MPPCameraInputSource.h"

#import <UIKit/UIKit.h>

@interface MPPCameraInputSource () <AVCaptureVideoDataOutputSampleBufferDelegate,
                                    AVCaptureDepthDataOutputDelegate>
@end

@implementation MPPCameraInputSource {
  AVCaptureSession* _session;
  AVCaptureDeviceInput* _videoDeviceInput;
  AVCaptureVideoDataOutput* _videoDataOutput;
  AVCaptureDepthDataOutput* _depthDataOutput API_AVAILABLE(ios(11.0));
  AVCaptureDevice* _currentDevice;

  matrix_float3x3 _cameraIntrinsicMatrix;
  OSType _pixelFormatType;
  BOOL _autoRotateBuffers;
  BOOL _didReadCameraIntrinsicMatrix;
  BOOL _setupDone;
  BOOL _useDepth;
  BOOL _useCustomOrientation;
  BOOL _videoMirrored;
}
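
// Defaults to the back camera and 32BGRA output. The capture session is created here but not
// configured or started; configuration happens lazily in -setupCamera on the first -start.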
- (instancetype)init {
  self = [super init];
  if (self) {
    _cameraPosition = AVCaptureDevicePositionBack;
    _session = [[AVCaptureSession alloc] init];
    _pixelFormatType = kCVPixelFormatType_32BGRA;

    AVAuthorizationStatus status =
        [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    _authorized = status == AVAuthorizationStatusAuthorized;
  }
  return self;
}

- (void)setDelegate:(id<MPPInputSourceDelegate>)delegate queue:(dispatch_queue_t)queue {
  [super setDelegate:delegate queue:queue];
  // Note that _depthDataOutput and _videoDataOutput may not have been created yet. In that case,
  // these messages to nil are ignored, and the delegates will be set later by setupCamera.
  [_videoDataOutput setSampleBufferDelegate:self queue:queue];
  [_depthDataOutput setDelegate:self callbackQueue:queue];
}
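
// Starts the capture session, configuring the camera first if needed. If autoRotateBuffers is
// enabled, also begins tracking device-orientation changes.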
- (void)start {
  if (!_setupDone) [self setupCamera];
  if (_autoRotateBuffers) {
    [self enableAutoRotateBufferObserver:YES];
  }
  [_session startRunning];
}

- (void)stop {
  if (_autoRotateBuffers) {
    [self enableAutoRotateBufferObserver:NO];
  }
  [_session stopRunning];
}

- (BOOL)isRunning {
  return _session.isRunning;
}
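
// Each of the setters below follows the same pattern: if the session is running, stop it,
// record the new value, invalidate the current setup so -setupCamera runs again, and restart.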
- (void)setCameraPosition:(AVCaptureDevicePosition)cameraPosition {
  BOOL wasRunning = [self isRunning];
  if (wasRunning) {
    [self stop];
  }
  _cameraPosition = cameraPosition;
  _setupDone = NO;
  if (wasRunning) {
    [self start];
  }
}

- (void)setUseDepth:(BOOL)useDepth {
  if (useDepth == _useDepth) {
    return;
  }

  BOOL wasRunning = [self isRunning];
  if (wasRunning) {
    [self stop];
  }
  _useDepth = useDepth;
  _setupDone = NO;
  if (wasRunning) {
    [self start];
  }
}

- (void)setOrientation:(AVCaptureVideoOrientation)orientation {
  if (orientation == _orientation) {
    return;
  }

  BOOL wasRunning = [self isRunning];
  if (wasRunning) {
    [self stop];
  }

  _orientation = orientation;
  _useCustomOrientation = YES;
  _setupDone = NO;
  if (wasRunning) {
    [self start];
  }
}

- (void)setVideoMirrored:(BOOL)videoMirrored {
  if (videoMirrored == _videoMirrored) {
    return;
  }

  BOOL wasRunning = [self isRunning];
  if (wasRunning) {
    [self stop];
  }
  _videoMirrored = videoMirrored;
  _setupDone = NO;
  if (wasRunning) {
    [self start];
  }
}

- (void)setAutoRotateBuffers:(BOOL)autoRotateBuffers {
  if (autoRotateBuffers == _autoRotateBuffers) {
    return;  // State has not changed.
  }
  _autoRotateBuffers = autoRotateBuffers;
  if ([self isRunning]) {
    // Enable or disable the observer immediately if this setting changes while the input source
    // is running.
    [self enableAutoRotateBufferObserver:_autoRotateBuffers];
  }
}
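
// Subscribes to (or unsubscribes from) UIDeviceOrientationDidChangeNotification so that captured
// buffers can be rotated to match the current device orientation.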
- (void)enableAutoRotateBufferObserver:(BOOL)enable {
  if (enable) {
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(deviceOrientationChanged)
                                                 name:UIDeviceOrientationDidChangeNotification
                                               object:nil];
    // Trigger a device orientation change instead of waiting for the first change.
    [self deviceOrientationChanged];
  } else {
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:UIDeviceOrientationDidChangeNotification
                                                  object:nil];
  }
}

- (OSType)pixelFormatType {
  return _pixelFormatType;
}

- (void)setPixelFormatType:(OSType)pixelFormatType {
  _pixelFormatType = pixelFormatType;
  if ([self isRunning]) {
    _videoDataOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(_pixelFormatType)};
  }
}

#pragma mark - Camera-specific methods

- (NSString*)sessionPreset {
  return _session.sessionPreset;
}

- (void)setSessionPreset:(NSString*)sessionPreset {
  _session.sessionPreset = sessionPreset;
}
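
// Builds the capture graph: picks a capture device for the current position (the TrueDepth
// camera when depth is requested on the front camera), wires up the video data output, adds a
// depth data output when requested, and applies the orientation, intrinsic-matrix delivery, and
// mirroring settings on the video connection.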
- (void)setupCamera {
  NSError* error = nil;

  if (_videoDeviceInput) {
    [_session removeInput:_videoDeviceInput];
  }

  AVCaptureDeviceType deviceType = AVCaptureDeviceTypeBuiltInWideAngleCamera;
  if (@available(iOS 11.1, *)) {
    if (_cameraPosition == AVCaptureDevicePositionFront && _useDepth) {
      deviceType = AVCaptureDeviceTypeBuiltInTrueDepthCamera;
    }
  }
  AVCaptureDeviceDiscoverySession* deviceDiscoverySession = [AVCaptureDeviceDiscoverySession
      discoverySessionWithDeviceTypes:@[ deviceType ]
                            mediaType:AVMediaTypeVideo
                             position:_cameraPosition];
  AVCaptureDevice* videoDevice =
      [deviceDiscoverySession devices]
          ? [deviceDiscoverySession devices].firstObject
          : [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  _videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
  if (error) {
    NSLog(@"%@", error);
    return;
  }
  [_session addInput:_videoDeviceInput];

  if (!_videoDataOutput) {
    _videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_session addOutput:_videoDataOutput];

    // Set the delegate if a handler queue has already been provided.
    if (self.delegateQueue)
      [_videoDataOutput setSampleBufferDelegate:self queue:self.delegateQueue];
    _videoDataOutput.alwaysDiscardsLateVideoFrames = YES;

    // Only a few pixel formats are available for capture output:
    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
    // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
    // kCVPixelFormatType_32BGRA.
    _videoDataOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(_pixelFormatType)};
  }

  // Remove the old depth data output, if any.
  if (_depthDataOutput) {
    [_session removeOutput:_depthDataOutput];
  }

  if (@available(iOS 11.1, *)) {
    if (_useDepth) {
      // Add the depth data output.
      _depthDataOutput = [[AVCaptureDepthDataOutput alloc] init];
      _depthDataOutput.alwaysDiscardsLateDepthData = YES;
      if ([_session canAddOutput:_depthDataOutput]) {
        [_session addOutput:_depthDataOutput];

        AVCaptureConnection* __unused connection =
            [_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData];

        // Set the delegate if a handler queue has already been provided.
        if (self.delegateQueue) {
          [_depthDataOutput setDelegate:self callbackQueue:self.delegateQueue];
        }
      } else {
        _depthDataOutput = nil;
      }
    }
  }

  if (_useCustomOrientation) {
    AVCaptureConnection* connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoOrientation = _orientation;
  }

  if (@available(iOS 11.0, *)) {
    AVCaptureConnection* connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([connection isCameraIntrinsicMatrixDeliverySupported]) {
      [connection setCameraIntrinsicMatrixDeliveryEnabled:YES];
    }
  }

  if (_videoMirrored) {
    AVCaptureConnection* connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoMirrored = _videoMirrored;
  }

  _setupDone = YES;
}
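
// Requests camera permission from the user. Note that AVFoundation may invoke the completion
// handler on an arbitrary dispatch queue.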
- (void)requestCameraAccessWithCompletionHandler:(void (^)(BOOL))handler {
  [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                           completionHandler:^(BOOL granted) {
                             _authorized = granted;
                             if (handler) {
                               handler(granted);
                             }
                           }];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate methods

// Receives frames from the camera. Invoked on self.delegateQueue.
- (void)captureOutput:(AVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection*)connection {
  if (@available(iOS 11.0, *)) {
    if (!_didReadCameraIntrinsicMatrix) {
      // Get the camera intrinsic matrix, delivered as CFData wrapping a matrix_float3x3
      // (delivery is enabled on the connection in setupCamera).
      CFTypeRef cameraIntrinsicData =
          CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, nil);
      if (cameraIntrinsicData != nil) {
        CFDataRef cfdr = (CFDataRef)cameraIntrinsicData;
        matrix_float3x3* intrinsicMatrix = (matrix_float3x3*)(CFDataGetBytePtr(cfdr));
        if (intrinsicMatrix != nil) {
          _cameraIntrinsicMatrix = *intrinsicMatrix;
        }
      }
      _didReadCameraIntrinsicMatrix = YES;
    }
  }
  CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
  if ([self.delegate respondsToSelector:@selector(processVideoFrame:timestamp:fromSource:)]) {
    [self.delegate processVideoFrame:imageBuffer timestamp:timestamp fromSource:self];
  } else if ([self.delegate respondsToSelector:@selector(processVideoFrame:fromSource:)]) {
    [self.delegate processVideoFrame:imageBuffer fromSource:self];
  }
}

#pragma mark - AVCaptureDepthDataOutputDelegate methods

// Receives depth frames from the camera. Invoked on self.delegateQueue.
- (void)depthDataOutput:(AVCaptureDepthDataOutput*)output
     didOutputDepthData:(AVDepthData*)depthData
              timestamp:(CMTime)timestamp
             connection:(AVCaptureConnection*)connection API_AVAILABLE(ios(11.0)) {
  if (depthData.depthDataType != kCVPixelFormatType_DepthFloat32) {
    depthData = [depthData depthDataByConvertingToDepthDataType:kCVPixelFormatType_DepthFloat32];
  }
  [self.delegate processDepthData:depthData timestamp:timestamp fromSource:self];
}

#pragma mark - NSNotificationCenter event handlers
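
// Note: UIDeviceOrientation and AVCaptureVideoOrientation agree numerically in a way that makes
// this cast yield the matching capture orientation for portrait and landscape, but the face-up,
// face-down, and unknown device orientations do not map to valid capture orientations.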
- (void)deviceOrientationChanged {
  AVCaptureConnection* connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  connection.videoOrientation = (AVCaptureVideoOrientation)[UIDevice currentDevice].orientation;
}

@end
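
// A minimal usage sketch (illustrative only; `viewController` and `videoQueue` are assumed to
// exist in client code, with `viewController` conforming to MPPInputSourceDelegate):
//
//   MPPCameraInputSource* cameraSource = [[MPPCameraInputSource alloc] init];
//   cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
//   [cameraSource setDelegate:viewController queue:videoQueue];
//   [cameraSource requestCameraAccessWithCompletionHandler:^(BOOL granted) {
//     if (granted) {
//       dispatch_async(videoQueue, ^{
//         [cameraSource start];
//       });
//     }
//   }];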