diff --git a/mediapipe/render/ios/Camera/OlaMTLCameraRenderView.h b/mediapipe/render/ios/Camera/OlaMTLCameraRenderView.h
index bf494ef18..243192adf 100644
--- a/mediapipe/render/ios/Camera/OlaMTLCameraRenderView.h
+++ b/mediapipe/render/ios/Camera/OlaMTLCameraRenderView.h
@@ -50,9 +50,11 @@
 @property (nonatomic, weak) id cameraDelegate;
 @property (nonatomic) dispatch_queue_t displayRenderQueue;
+@property (nonatomic) dispatch_queue_t offlineRenderQueue;
 
 /// Raw camera texture; can be read quickly
 @property (nonatomic, readonly, strong) OlaShareTexture *cameraTexture;
+@property (nonatomic, readonly, strong) OlaShareTexture *halfCameraTexture;
 
 @property (nonatomic, readonly, strong) OlaShareTexture *shareTexture;
 
 /// Raw camera-rendered texture without post-processing
diff --git a/mediapipe/render/ios/Camera/OlaMTLCameraRenderView.mm b/mediapipe/render/ios/Camera/OlaMTLCameraRenderView.mm
index a05ba6cdb..628b797eb 100644
--- a/mediapipe/render/ios/Camera/OlaMTLCameraRenderView.mm
+++ b/mediapipe/render/ios/Camera/OlaMTLCameraRenderView.mm
@@ -12,7 +12,7 @@
 #import
 #import
 
-static const NSUInteger MaxFramesInFlight = 3;
+static const NSUInteger MaxFramesInFlight = 1;
 static size_t const kOlaDynamicTextureByteAlignment = 16;
 
 NS_INLINE size_t QAAlignSize(size_t size)
@@ -52,7 +52,7 @@ NS_INLINE size_t QAAlignSize(size_t size)
 
     _shareTexture = nil;
     _cameraTexture = nil;
-
+    _halfCameraTexture = nil;
 }
 
 - (instancetype)initWithFrame:(CGRect)frame
@@ -104,6 +104,11 @@ NS_INLINE size_t QAAlignSize(size_t size)
                                                   metalPixelFormat:self.colorPixelFormat
                                                               size:textureSize];
 
+    _halfCameraTexture = [[OlaShareTexture alloc] initWithMetalDevice:self.device
+                                                        openGLContext:self.openGLContext
+                                                     metalPixelFormat:self.colorPixelFormat
+                                                                 size:CGSizeMake(textureSize.width * 0.25, textureSize.height * 0.25)];
+
     _mtlRender = [[OlaMTLCameraRender alloc] initWithRenderSize:textureSize
                                                          device:self.device
                                                   cameraTexture:self.cameraTexture
@@ -118,6 +123,7 @@ NS_INLINE size_t QAAlignSize(size_t size)
 
     self.displayFrameRenderingSemaphore = dispatch_semaphore_create(MaxFramesInFlight);
     self.displayRenderQueue = dispatch_queue_create("Ola.ios.displayRenderQueue", interactive);
+    self.offlineRenderQueue = dispatch_queue_create("Ola.ios.offlineRenderQueue", interactive);
     self.cameraFrameRenderingSemaphore = dispatch_semaphore_create(1);
 }
@@ -209,6 +215,10 @@ NS_INLINE size_t QAAlignSize(size_t size)
             [strongSelf.mtlRender renderToShareTexture:strongSelf.shareTexture.metalTexture
                                          commandBuffer:commandBuffer
                                              frameTime:strongSelf.frameTime];
+            [strongSelf.mtlRender renderToTexture:strongSelf.halfCameraTexture.metalTexture
+                                             from:strongSelf.cameraTexture.metalTexture commandBuffer:commandBuffer
+                                textureCoordinate:strongSelf.mtlRender.noRotationBuffer];
+
             [commandBuffer commit];
             commandBuffer = [strongSelf.mtlRender.commandQueue commandBuffer];
@@ -234,14 +244,13 @@ NS_INLINE size_t QAAlignSize(size_t size)
             [EAGLContext setCurrentContext:strongSelf.openGLContext];
             [strongSelf.cameraDelegate draw:strongSelf.frameTime];
-
-            [strongSelf.cameraDelegate bgraCameraTextureReady:strongSelf.cameraTexture
-                                              onScreenTexture:strongSelf.shareTexture
-                                                    frameTime:strongSelf.frameTime * 1000];
-
+            IOSurfaceID surfaceId = [strongSelf.cameraDelegate externalRender:strongSelf.frameTime targetTexture:strongSelf.cameraTexture commandBuffer:commandBuffer];
+            [strongSelf.cameraDelegate bgraCameraTextureReady:strongSelf.halfCameraTexture
+                                              onScreenTexture:strongSelf.shareTexture
+                                                    frameTime:strongSelf.frameTime * 1000];
             if (surfaceId != -1) {
                 // Render the surfaceId here
                 IOSurfaceRef ioSurface = IOSurfaceLookup(surfaceId);
@@ -320,14 +329,14 @@ NS_INLINE size_t QAAlignSize(size_t size)
         };
 
         CFRetain(pixelbuffer);
-        dispatch_async(self.displayRenderQueue, ^{
+        dispatch_async(self.offlineRenderQueue, ^{
             if (weakSelf == nil) {
                 CFRelease(pixelbuffer);
                 return;
             }
             __strong OlaMTLCameraRenderView *strongSelf = weakSelf;
             [strongSelf.mtlRender renderToCameraTextureWithPixelBuffer:pixelbuffer completedHandler:renderCompleted];
-
+            CFRelease(pixelbuffer);
         });
     }
@@ -351,7 +360,7 @@ NS_INLINE size_t QAAlignSize(size_t size)
         };
 
         CFRetain(sampleBuffer);
-        dispatch_async(self.displayRenderQueue, ^{
+        dispatch_async(self.offlineRenderQueue, ^{
             if (weakSelf == nil) {
                 CFRelease(sampleBuffer);
                 return;
@@ -359,8 +368,9 @@ NS_INLINE size_t QAAlignSize(size_t size)
             __strong OlaMTLCameraRenderView *strongSelf = weakSelf;
             CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
             [strongSelf.mtlRender renderToCameraTextureWithPixelBuffer:pixelBuffer completedHandler:renderCompleted];
-
+            CFRelease(sampleBuffer);
+
         });
     }
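Note on the queue hops above: the buffer is now retained before the dispatch_async onto offlineRenderQueue and released exactly once on every exit path of the block, including the early weakSelf return. A minimal sketch of that ownership pattern (Objective-C++ with Apple's GCD and CoreFoundation; enqueueOffline and renderBlock are hypothetical stand-ins for the mtlRender call):

    #include <dispatch/dispatch.h>
    #include <CoreFoundation/CoreFoundation.h>
    #include <CoreVideo/CoreVideo.h>

    // Retain the buffer for the duration of the async hop; release exactly
    // once on every path out of the block, mirroring the CFRetain/CFRelease
    // pairing the diff adds around renderToCameraTextureWithPixelBuffer:.
    static void enqueueOffline(dispatch_queue_t offlineQueue,
                               CVPixelBufferRef pixelBuffer,
                               void (^renderBlock)(CVPixelBufferRef)) {
        CFRetain(pixelBuffer);
        dispatch_async(offlineQueue, ^{
            if (renderBlock == nil) {      // early-exit path still balances the retain
                CFRelease(pixelBuffer);
                return;
            }
            renderBlock(pixelBuffer);      // synchronous use while the retain is held
            CFRelease(pixelBuffer);
        });
    }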
diff --git a/mediapipe/render/module/beauty/face_mesh_module_imp.cc b/mediapipe/render/module/beauty/face_mesh_module_imp.cc
index 5184041e0..c0a3ac150 100644
--- a/mediapipe/render/module/beauty/face_mesh_module_imp.cc
+++ b/mediapipe/render/module/beauty/face_mesh_module_imp.cc
@@ -25,11 +25,11 @@ namespace Opipe
     void FaceMeshCallFrameDelegate::outputPixelbuffer(OlaGraph *graph, CVPixelBufferRef pixelbuffer,
                                                       const std::string &streamName, int64_t timestamp)
     {
-        _imp->currentDispatch()->runSync([&] {
-            IOSurfaceRef surface = CVPixelBufferGetIOSurface(pixelbuffer);
-            IOSurfaceID surfaceId = IOSurfaceGetID(surface);
-            Log("Opipe", "streamName %s timeStamp:%ld iosurfaceid:%d", streamName.c_str(), timestamp, surfaceId);
-        });
+//        _imp->currentDispatch()->runSync([&] {
+//            IOSurfaceRef surface = CVPixelBufferGetIOSurface(pixelbuffer);
+//            IOSurfaceID surfaceId = IOSurfaceGetID(surface);
+//            Log("Opipe", "streamName %s timeStamp:%ld iosurfaceid:%d", streamName.c_str(), timestamp, surfaceId);
+//        });
     }
 #endif
diff --git a/mediapipe/render/module/beauty/filters/FaceDistortionFilter.cpp b/mediapipe/render/module/beauty/filters/FaceDistortionFilter.cpp
index 4097015f7..b943ef4ba 100644
--- a/mediapipe/render/module/beauty/filters/FaceDistortionFilter.cpp
+++ b/mediapipe/render/module/beauty/filters/FaceDistortionFilter.cpp
@@ -249,7 +249,7 @@ namespace Opipe
         {
             Vector2 point1 = _positionAt(362);
             Vector2 point2 = _positionAt(263);
-            Vector2 point3 = _positionAt(417);
+            Vector2 point3 = _positionAt(168);
             Vector2 center = point1.getCenter(point2);
             float distance = center.distance(point3);
             addPoint(center, distance / 2, distance / 2, 0.3, 1, 0.0f, 0.0f, 1);
@@ -259,7 +259,7 @@ namespace Opipe
         {
             Vector2 point1 = _positionAt(33);
             Vector2 point2 = _positionAt(133);
-            Vector2 point3 = _positionAt(193);
+            Vector2 point3 = _positionAt(168);
             Vector2 center = point1.getCenter(point2);
             float distance = center.distance(point3);
             addPoint(center, distance / 2, distance / 2, 0.3, 1, 0.0f, 0.0f, 1);
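Both hunks above now measure against the same reference landmark, 168 (the point on the nose bridge between the eyes in the MediaPipe Face Mesh topology), instead of the per-eye points 417 and 193, which keeps the two distortion radii symmetric. A sketch of the shared computation, with a hypothetical Vector2 mirroring the filter's getCenter/distance helpers:

    #include <cmath>

    struct Vector2 {
        float x = 0, y = 0;
        Vector2 getCenter(const Vector2 &o) const { return {(x + o.x) / 2, (y + o.y) / 2}; }
        float distance(const Vector2 &o) const { return std::hypot(x - o.x, y - o.y); }
    };

    // Center of one eye from its two corner landmarks (e.g. 362/263 or 33/133);
    // the radius comes from the distance to the shared nose-bridge landmark (168),
    // matching addPoint(center, d/2, d/2, ...) in both hunks.
    static void eyeDistortionParams(Vector2 corner1, Vector2 corner2, Vector2 noseBridge,
                                    Vector2 &center, float &radius) {
        center = corner1.getCenter(corner2);
        radius = center.distance(noseBridge) / 2;
    }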
diff --git a/mediapipe/render/module/beauty/filters/OlaBeautyFilter.cpp b/mediapipe/render/module/beauty/filters/OlaBeautyFilter.cpp
index dc6d9d8af..09ccb3818 100644
--- a/mediapipe/render/module/beauty/filters/OlaBeautyFilter.cpp
+++ b/mediapipe/render/module/beauty/filters/OlaBeautyFilter.cpp
@@ -67,33 +67,42 @@ namespace Opipe {
         }
         _lutFilter = LUTFilter::create(context);
         _unSharpMaskFilter = UnSharpMaskFilter::create(context);
-        _unSharpMaskFilter->addTarget(_lutFilter, 0);
+        _faceDistortFilter = FaceDistortionFilter::create(context);
         _bilateralAdjustFilter = BilateralAdjustFilter::create(context);
-        addFilter(_bilateralAdjustFilter);
-
-        _lookUpGroupFilter = FilterGroup::create(context);
-        _lookUpGroupFilter->addFilter(_unSharpMaskFilter);
-
         _alphaBlendFilter = AlphaBlendFilter::create(context);
         _bilateralFilter = BilateralFilter::create(context);
+        _lookUpGroupFilter = FilterGroup::create(context);
+
+        addFilter(_bilateralAdjustFilter);
         addFilter(_bilateralFilter);
-
+
+        _unSharpMaskFilter->addTarget(_lutFilter, 0);
+        _lookUpGroupFilter->addFilter(_unSharpMaskFilter);
+        _lookUpGroupFilter->setTerminalFilter(_lutFilter);
+
         _bilateralAdjustFilter->addTarget(_lookUpGroupFilter)->addTarget(_alphaBlendFilter, 1);
-
         _bilateralFilter->addTarget(_bilateralAdjustFilter, 1)->addTarget(_alphaBlendFilter, 0);
-
         _alphaBlendFilter->setMix(0.8);
-
-
+
         _bilateralAdjustFilter->setOpacityLimit(0.6);
 
         _bilateralFilter->setDistanceNormalizationFactor(2.746);
         _bilateralFilter->setTexelSpacingMultiplier(2.7);
 
         _unSharpMaskFilter->setBlurRadiusInPixel(4.0f, true);
         _unSharpMaskFilter->setBlurRadiusInPixel(2.0f, false);
         _unSharpMaskFilter->setIntensity(1.365);
-
+
         _alphaBlendFilter->addTarget(_faceDistortFilter);
         setTerminalFilter(_faceDistortFilter);
@@ -120,12 +129,7 @@ namespace Opipe {
 
         registerProperty("skin", 0.0f, "磨皮 0.0 - 1.0",
                          [this](float skin) {
-            if (skin == 0.0) {
-                _bilateralAdjustFilter->setEnable(false);
-            } else {
-                _bilateralAdjustFilter->setEnable(true);
-                _bilateralAdjustFilter->setOpacityLimit(skin);
-            }
+            _bilateralAdjustFilter->setOpacityLimit(skin);
         });
 
         registerProperty("whiten", 0.0f, "美白 0.0 - 1.0",
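The regrouped constructor above reads as create, register, connect, tune. The connection statements rely on addTarget returning its target so hops can be chained, with the integer selecting the target's input slot. A toy model of that convention, assumed from the chained calls in the diff rather than from the Opipe headers:

    #include <vector>
    #include <utility>

    struct Filter {
        std::vector<std::pair<Filter *, int>> targets;

        // Returning the target (not 'this') is what makes
        // a->addTarget(b)->addTarget(c, 1) wire a->b and then b->c (slot 1).
        Filter *addTarget(Filter *target, int inputSlot = 0) {
            targets.push_back({target, inputSlot});
            return target;
        }
    };

Under that reading, the alpha blend's slot 0 receives _bilateralAdjustFilter's direct output and slot 1 the unsharp-mask/LUT branch, so setMix(0.8) weights the lookup branch against the adjusted base.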
diff --git a/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample.xcodeproj/project.xcworkspace/xcuserdata/wangrenzhu.xcuserdatad/UserInterfaceState.xcuserstate b/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample.xcodeproj/project.xcworkspace/xcuserdata/wangrenzhu.xcuserdatad/UserInterfaceState.xcuserstate
deleted file mode 100644
index 1e47bdc80..000000000
Binary files a/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample.xcodeproj/project.xcworkspace/xcuserdata/wangrenzhu.xcuserdatad/UserInterfaceState.xcuserstate and /dev/null differ
diff --git a/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample/Base.lproj/Main.storyboard b/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample/Base.lproj/Main.storyboard
index 1326991b0..253113538 100644
--- a/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample/Base.lproj/Main.storyboard
+++ b/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample/Base.lproj/Main.storyboard
[storyboard XML hunks at -18,7 / -27,7 / -37,31 / -71,7 / -93,7: the markup did not survive extraction; only the hunk headers and bare +/- markers remain]
diff --git a/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample/ViewController.mm b/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample/ViewController.mm
index 9d76197cc..398a26287 100644
--- a/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample/ViewController.mm
+++ b/mediapipe/render/module/beauty/ios/example/OpipeBeautyModuleExample/OpipeBeautyModuleExample/ViewController.mm
@@ -269,6 +269,7 @@ AVCaptureAudioDataOutputSampleBufferDelegate> {
                 targetTexture:(OlaShareTexture *)targetTexture
                 commandBuffer:(id)buffer
 {
+//    [[OlaFaceUnity sharedInstance] processVideoFrame:targetTexture.renderTarget timeStamp:frameTime];
     FaceTextureInfo inputTexture;
     inputTexture.width = targetTexture.size.width;
     inputTexture.height = targetTexture.size.height;
diff --git a/mediapipe/render/module/beauty/ios/framework/OlaFaceUnity.mm b/mediapipe/render/module/beauty/ios/framework/OlaFaceUnity.mm
index 7677e5deb..8b445d411 100644
--- a/mediapipe/render/module/beauty/ios/framework/OlaFaceUnity.mm
+++ b/mediapipe/render/module/beauty/ios/framework/OlaFaceUnity.mm
@@ -50,27 +50,29 @@
 
 - (FaceTextureInfo)render:(FaceTextureInfo)inputTexture
 {
-    @autoreleasepool {
-        TextureInfo rs;
-        rs.ioSurfaceId = inputTexture.ioSurfaceId;
-        if (_face_module) {
-            TextureInfo input;
-            input.width = inputTexture.width;
-            input.height = inputTexture.height;
-            input.ioSurfaceId = inputTexture.ioSurfaceId;
-            input.textureId = inputTexture.textureId;
-            input.frameTime = inputTexture.frameTime;
-
-            rs = _face_module->renderTexture(input);
-        }
-        FaceTextureInfo result;
-        result.width = rs.width;
-        result.height = rs.height;
-        result.ioSurfaceId = rs.ioSurfaceId;
-        result.textureId = rs.textureId;
-        result.frameTime = rs.frameTime;
-        return result;
-    }
+    TextureInfo rs;
+    rs.ioSurfaceId = inputTexture.ioSurfaceId;
+    rs.width = inputTexture.width;
+    rs.height = inputTexture.height;
+    rs.textureId = inputTexture.textureId;
+    rs.frameTime = inputTexture.frameTime;
+    if (_face_module) {
+        TextureInfo input;
+        input.width = inputTexture.width;
+        input.height = inputTexture.height;
+        input.ioSurfaceId = inputTexture.ioSurfaceId;
+        input.textureId = inputTexture.textureId;
+        input.frameTime = inputTexture.frameTime;
+
+        rs = _face_module->renderTexture(input);
+    }
+    FaceTextureInfo result;
+    result.width = rs.width;
+    result.height = rs.height;
+    result.ioSurfaceId = rs.ioSurfaceId;
+    result.textureId = rs.textureId;
+    result.frameTime = rs.frameTime;
+    return result;
 }
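The rewritten render: seeds rs with every field of the input before consulting _face_module, so a nil module now degrades to an identity pass instead of returning a struct whose width, height and textureId were never set (previously only ioSurfaceId was copied). The shape of that fallback, with field types assumed:

    #include <cstdint>

    // Field names follow the diff; the exact integer types are assumptions.
    struct TextureInfo {
        int width = 0;
        int height = 0;
        uint32_t ioSurfaceId = 0;
        uint32_t textureId = 0;
        int64_t frameTime = 0;
    };

    struct FaceModule {
        virtual TextureInfo renderTexture(const TextureInfo &input) = 0;
    };

    // Seed the result with the input so every caller-visible field survives
    // when no face module is attached.
    static TextureInfo renderOrPassThrough(const TextureInfo &input, FaceModule *module) {
        TextureInfo rs = input;            // identity fallback
        if (module != nullptr) {
            rs = module->renderTexture(input);
        }
        return rs;
    }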
diff --git a/mediapipe/render/module/common/ola_graph.cc b/mediapipe/render/module/common/ola_graph.cc
index 9455fb53f..44836b7c9 100644
--- a/mediapipe/render/module/common/ola_graph.cc
+++ b/mediapipe/render/module/common/ola_graph.cc
@@ -19,29 +19,35 @@ namespace Opipe
                               const mediapipe::Packet &packet)
     {
         OlaGraph *graph = (OlaGraph *)wrapperVoid;
-        if (graph->_delegate.expired())
-        {
-            return;
-        }
-
-        graph->_delegate.lock()->outputPacket(graph, packet, streamName);
-
-        if (packetType == MPPPacketTypeRaw)
-        {
-            graph->_delegate.lock()->outputPacket(graph, packet, packetType, streamName);
-        }
 #if defined(__APPLE__)
-        else if (packetType == MPPPacketTypePixelBuffer ||
-                 packetType == MPPPacketTypeImage)
-        {
-            graph->_framesInFlight--;
-            CVPixelBufferRef pixelBuffer;
-            if (packetType == MPPPacketTypePixelBuffer)
-                pixelBuffer = mediapipe::GetCVPixelBufferRef(packet.Get());
-            else
-                pixelBuffer = packet.Get().GetCVPixelBufferRef();
-
-            graph->_delegate.lock()->outputPixelbuffer(graph, pixelBuffer, streamName, packet.Timestamp().Value());
+        @autoreleasepool {
+#endif
+            if (graph->_delegate.expired())
+            {
+                return;
+            }
+
+            graph->_delegate.lock()->outputPacket(graph, packet, streamName);
+
+            if (packetType == MPPPacketTypeRaw)
+            {
+                graph->_delegate.lock()->outputPacket(graph, packet, packetType, streamName);
+            } else if (packetType == MPPPacketTypeImageFrame) {
+                graph->_framesInFlight--;
+            }
+#if defined(__APPLE__)
+            else if (packetType == MPPPacketTypePixelBuffer ||
+                     packetType == MPPPacketTypeImage)
+            {
+                graph->_framesInFlight--;
+                CVPixelBufferRef pixelBuffer;
+                if (packetType == MPPPacketTypePixelBuffer)
+                    pixelBuffer = mediapipe::GetCVPixelBufferRef(packet.Get());
+                else
+                    pixelBuffer = packet.Get().GetCVPixelBufferRef();
+
+                graph->_delegate.lock()->outputPixelbuffer(graph, pixelBuffer, streamName, packet.Timestamp().Value());
+            }
         }
 #endif
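The callback above now wraps its whole body in @autoreleasepool behind the Apple guard, and also decrements _framesInFlight for MPPPacketTypeImageFrame packets so non-GPU frames release back-pressure too. A minimal Objective-C++ sketch of the pooling pattern for a per-packet callback that runs on a long-lived worker thread (onPacket is a hypothetical stand-in for the wrapper function):

    // Drain autoreleased objects once per packet instead of letting them
    // accumulate on a thread that never returns to a run loop.
    static void onPacket(/* packet, stream name, ... */) {
    #if defined(__APPLE__)
        @autoreleasepool {
    #endif
            // Inspect the packet, look up CVPixelBuffers, invoke delegate
            // methods; anything autoreleased here dies when the pool drains.
    #if defined(__APPLE__)
        }   // pool drains here, once per callback invocation
    #endif
    }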