Merge branch 'develop' of http://114.55.7.123:3000/client_beauty/Opipe into develop

This commit is contained in:
WangQiang 2022-08-03 19:10:36 +08:00
commit 8ae5b18fd0
40 changed files with 2418 additions and 8841 deletions

View File

@@ -67,3 +67,10 @@ mediapipe_binary_graph(
    output_name = "face_mesh_mobile_gpu.binarypb",
    deps = [":mobile_calculators"],
)
mediapipe_binary_graph(
name = "face_mesh_mobile_landmark_gpu_binary_graph",
graph = "face_mesh_mobile_landmark.pbtxt",
output_name = "face_mesh_mobile_landmark_gpu.binarypb",
deps = [":mobile_calculators"],
)

View File

@@ -1,5 +1,5 @@
# MediaPipe graph that performs face mesh with TensorFlow Lite on GPU.
max_queue_size: 2
# GPU buffer. (GpuBuffer)
input_stream: "input_video"

View File

@@ -0,0 +1,69 @@
# MediaPipe graph that performs face mesh with TensorFlow Lite on GPU.
# GPU buffer. (GpuBuffer)
input_stream: "input_video"
# Max number of faces to detect/process. (int)
input_side_packet: "num_faces"
# Output image with rendered results. (GpuBuffer)
output_stream: "output_video"
# Collection of detected/processed faces, each represented as a list of
# landmarks. (std::vector<NormalizedLandmarkList>)
output_stream: "multi_face_landmarks"
# output_stream: "face_detections"
# Throttles the images flowing downstream for flow control. It passes through
# the very first incoming image unaltered, and waits for downstream nodes
# (calculators and subgraphs) in the graph to finish their tasks before it
# passes through another image. All images that come in while waiting are
# dropped, limiting the number of in-flight images in most part of the graph to
# 1. This prevents the downstream nodes from queuing up incoming images and data
# excessively, which leads to increased latency and memory usage, unwanted in
# real-time mobile applications. It also eliminates unnecessary computation,
# e.g., the output produced by a node may get dropped downstream if the
# subsequent nodes are still busy processing previous inputs.
node {
calculator: "FlowLimiterCalculator"
input_stream: "input_video"
input_stream: "FINISHED:output_video"
input_stream_info: {
tag_index: "FINISHED"
back_edge: true
}
output_stream: "throttled_input_video"
}
# Defines side packets for further use in the graph.
node {
calculator: "ConstantSidePacketCalculator"
output_side_packet: "PACKET:with_attention"
node_options: {
[type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
packet { bool_value: true }
}
}
}
# Subgraph that detects faces and corresponding landmarks.
node {
calculator: "FaceLandmarkFrontGpu"
input_stream: "IMAGE:throttled_input_video"
input_side_packet: "NUM_FACES:num_faces"
input_side_packet: "WITH_ATTENTION:with_attention"
output_stream: "LANDMARKS:multi_face_landmarks"
output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks"
output_stream: "DETECTIONS:face_detections"
output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections"
}
# Subgraph that renders face-landmark annotation onto the input image (draws landmarks, for debugging).
node {
calculator: "FaceRendererGpu"
input_stream: "IMAGE:throttled_input_video"
input_stream: "LANDMARKS:multi_face_landmarks"
input_stream: "NORM_RECTS:face_rects_from_landmarks"
input_stream: "DETECTIONS:face_detections"
output_stream: "IMAGE:output_video"
}
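
Note (not part of the diff): a minimal sketch of how the new face_mesh_mobile_landmark graph could be driven from C++ once it is compiled into face_mesh_mobile_landmark_gpu.binarypb. It only shows graph setup, the num_faces side packet, and observing multi_face_landmarks; feeding GpuBuffer frames into input_video needs the usual MediaPipe GPU plumbing, which is omitted here, and the function name is illustrative.

```cpp
#include <string>
#include <vector>

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"

// graph_text is assumed to hold the contents of face_mesh_mobile_landmark.pbtxt.
absl::Status RunLandmarkGraph(const std::string& graph_text) {
  auto config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(graph_text);
  mediapipe::CalculatorGraph graph;
  MP_RETURN_IF_ERROR(graph.Initialize(config));

  // Receive the landmark packets produced by FaceLandmarkFrontGpu.
  MP_RETURN_IF_ERROR(graph.ObserveOutputStream(
      "multi_face_landmarks", [](const mediapipe::Packet& packet) {
        const auto& faces =
            packet.Get<std::vector<mediapipe::NormalizedLandmarkList>>();
        // faces[i].landmark(j).x()/y()/z() are normalized coordinates.
        return absl::OkStatus();
      }));

  // "with_attention" is created inside the graph; only num_faces is external.
  MP_RETURN_IF_ERROR(graph.StartRun(
      {{"num_faces", mediapipe::MakePacket<int>(1)}}));

  // Frames would be pushed with graph.AddPacketToInputStream("input_video", ...)
  // as GpuBuffer packets (GPU context setup omitted in this sketch).
  return graph.WaitUntilIdle();
}
```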

View File

@@ -63,6 +63,13 @@ cc_library(
    "//mediapipe:apple": [],
    "//conditions:default": [],
}),
copts = select({
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
],
"//conditions:default": [],
}),
)
cc_library(

View File

@@ -341,8 +341,8 @@ bool Filter::proceed(float frameTime, bool bUpdateTargets/* = true*/) {
CHECK_GL(glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0));
filter_externDraw();
_framebuffer->inactive();
-Log("Filter", "%s渲染完毕准备开始Unlock Framebuffer:%s", typeid(*this).name(),
-_framebuffer->_hashCode.c_str());
+// Log("Filter", "%s渲染完毕准备开始Unlock Framebuffer:%s", typeid(*this).name(),
+// _framebuffer->_hashCode.c_str());
#if DEBUG
_framebuffer->unlock(typeid(*this).name());
#else
@@ -502,6 +502,43 @@ void Filter::update(float frameTime) {
_framebuffer = 0;
}
bool Filter::getProperty(const std::string& name, std::vector<Vec2>& retValue) {
Property* property = _getProperty(name);
if (!property) return false;
retValue = ((Vec2ArrayProperty*)property)->value;
return true;
}
bool Filter::registerProperty(const std::string& name,
std::vector<Vec2> defaultValue,
const std::string& comment/* = ""*/,
std::function<void(std::vector<Vec2>&)> setCallback/* = 0*/) {
if (hasProperty(name)) return false;
Vec2ArrayProperty property;
property.type = "vec2Array";
property.value = defaultValue;
property.comment = comment;
property.setCallback = setCallback;
_vec2ArrayProperties[name] = property;
return true;
}
bool Filter::setProperty(const std::string& name, std::vector<Vec2> value) {
Property* rawProperty = _getProperty(name);
if (!rawProperty) {
Log("WARNING", "Filter::setProperty invalid property %s", name.c_str());
return false;
} else if (rawProperty->type != "vec2Array") {
Log("WARNING", "Filter::setProperty The property type is expected to be %s", rawProperty->type.c_str());
return false;
}
Vec2ArrayProperty* property = ((Vec2ArrayProperty *)rawProperty);
property->value = value;
if (property->setCallback)
property->setCallback(value);
return true;
}
bool Filter::registerProperty(const std::string& name, int defaultValue, const std::string& comment/* = ""*/, std::function<void(int&)> setCallback/* = 0*/) {
if (hasProperty(name)) return false;
IntProperty property;
@@ -616,6 +653,14 @@ Filter::Property* Filter::_getProperty(const std::string& name) {
return &_stringProperties[name];
}
if (_vec2ArrayProperties.find(name) != _vec2ArrayProperties.end()) {
return &_vec2ArrayProperties[name];
}
if (_vec2Properties.find(name) != _vec2Properties.end()) {
return &_vec2Properties[name];
}
return 0;
}
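
Note (not part of the diff): a small usage sketch of the vec2Array property plumbing added above, assuming the registerProperty/setProperty/getProperty overloads are public as declared in Filter.hpp below. The filter pointer, function name, and include paths are placeholders.

```cpp
#include <vector>

#include "mediapipe/render/core/Filter.hpp"      // assumed header locations
#include "mediapipe/render/core/math/vec2.hpp"

// Push face landmarks into any Opipe::Filter that exposes a "face" property.
void pushFacePoints(Opipe::Filter* filter,
                    const std::vector<Opipe::Vec2>& landmarks) {
  // Registration normally happens once inside the filter's own init();
  // registerProperty simply returns false if the name already exists.
  filter->registerProperty(
      "face", std::vector<Opipe::Vec2>(), "face landmark points",
      [](std::vector<Opipe::Vec2>& points) {
        // setCallback fires every time setProperty("face", ...) is called.
      });

  filter->setProperty("face", landmarks);  // dispatched to Vec2ArrayProperty

  std::vector<Opipe::Vec2> current;
  if (filter->getProperty("face", current)) {
    // current now holds the landmarks that were just set.
  }
}
```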

View File

@@ -122,9 +122,22 @@ public:
virtual bool registerProperty(const std::string& name, int defaultValue, const std::string& comment = "", std::function<void(int&)> setCallback = 0);
virtual bool registerProperty(const std::string& name, float defaultValue, const std::string& comment = "", std::function<void(float&)> setCallback = 0);
virtual bool registerProperty(const std::string& name, const std::string& defaultValue, const std::string& comment = "", std::function<void(std::string&)> setCallback = 0);
bool registerProperty(const std::string& name,
std::vector<Vec2> defaultValue,
const std::string& comment = "",
std::function<void(std::vector<Vec2>&)> setCallback = 0);
bool registerProperty(const std::string& name,
Vec2 defaultValue,
const std::string& comment = "",
std::function<void(Vec2&)> setCallback = 0);
bool setProperty(const std::string& name, Vec2 value);
bool setProperty(const std::string& name, int value);
bool setProperty(const std::string& name, float value);
bool setProperty(const std::string& name, std::string value);
bool setProperty(const std::string& name, std::vector<Vec2> retValue);
bool getProperty(const std::string& name, std::vector<Vec2>& retValue);
bool getProperty(const std::string& name, int& retValue);
bool getProperty(const std::string& name, float& retValue);
bool getProperty(const std::string& name, std::string& retValue);
@@ -223,8 +236,27 @@ protected:
std::string type;
std::string comment;
};
struct Vec2ArrayProperty : Property {
std::vector<Vec2> value;
std::function<void(std::vector<Vec2>&)> setCallback;
};
std::map<std::string, Vec2ArrayProperty> _vec2ArrayProperties;
struct Vec2Property : Property {
Vec2 value;
std::function<void(Vec2&)> setCallback;
};
virtual Property* _getProperty(const std::string& name);
std::map<std::string, Vec2Property> _vec2Properties;
struct Vec3Property : Property {
Vec3 value;
std::function<void(Vec3&)> setCallback;
};
struct IntProperty : Property {
int value;
std::function<void(int&)> setCallback;

View File

@@ -136,16 +136,16 @@ namespace Opipe {
void Framebuffer::lock(std::string lockKey) {
if (lockKey == "Unknow") {
-Log("Framebuffer LOCK", "未知锁 【hasCode :%s】", _hashCode.c_str());
+// Log("Framebuffer LOCK", "未知锁 【hasCode :%s】", _hashCode.c_str());
} else if (lockKey != _lockKey) {
-Log("Framebuffer LOCK", "Key变更:%s 【hasCode :%s】", lockKey.c_str(), _hashCode.c_str());
+// Log("Framebuffer LOCK", "Key变更:%s 【hasCode :%s】", lockKey.c_str(), _hashCode.c_str());
}
_lockKey = lockKey;
_framebufferRetainCount++;
-Log("Framebuffer LOCK", "lock retainCount == :%d lockKey:%s 【framebufferCode:%s】",
-_framebufferRetainCount,
-lockKey.c_str(), _hashCode.c_str());
+// Log("Framebuffer LOCK", "lock retainCount == :%d lockKey:%s 【framebufferCode:%s】",
+// _framebufferRetainCount,
+// lockKey.c_str(), _hashCode.c_str());
}
void Framebuffer::unlock(std::string lockKey) {
@@ -156,16 +156,16 @@ namespace Opipe {
}
if (lockKey != _lockKey) {
-Log("Framebuffer UNLOCK", "可能是多次Lock后Unlock retainCount:%d lockKey:%s 【framebufferCode:%s】",
-_framebufferRetainCount,
-lockKey.c_str(),
-_hashCode.c_str());
+// Log("Framebuffer UNLOCK", "可能是多次Lock后Unlock retainCount:%d lockKey:%s 【framebufferCode:%s】",
+// _framebufferRetainCount,
+// lockKey.c_str(),
+// _hashCode.c_str());
}
-Log("Framebuffer UNLOCK", "unlock retainCount == :%d lockKey:%s 【framebufferCode:%s】"
-, _framebufferRetainCount,
-lockKey.c_str(),
-_hashCode.c_str());
+// Log("Framebuffer UNLOCK", "unlock retainCount == :%d lockKey:%s 【framebufferCode:%s】"
+// , _framebufferRetainCount,
+// lockKey.c_str(),
+// _hashCode.c_str());
}
void Framebuffer::resetRetainCount() {

View File

@@ -88,16 +88,16 @@ Framebuffer* FramebufferCache::fetchFramebuffer(Context *context,
forceCleanFramebuffer(framebuffer);
framebuffer = 0;
} else if (framebuffer->framebufferRetainCount() == 0 && !framebuffer->isDealloc) {
-Log("Framebuffer 【命中缓存】", "hashcode:%s count:%d",
-framebufferHashCodeKey.first.c_str(),
-framebuffer->framebufferRetainCount());
+// Log("Framebuffer 【命中缓存】", "hashcode:%s count:%d",
+// framebufferHashCodeKey.first.c_str(),
+// framebuffer->framebufferRetainCount());
return framebuffer;
}
}
}
-Log("Framebuffer 所有缓存【未命中】", "hashcode:%s count:%d",
-lookupHash.c_str(),
-matchFramebuffersHashCode.size());
+// Log("Framebuffer 所有缓存【未命中】", "hashcode:%s count:%d",
+// lookupHash.c_str(),
+// matchFramebuffersHashCode.size());
// If every cached framebuffer is in use, or no matching one is found, create a new one
if (useTextureCache) {

View File

@@ -15,10 +15,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if defined(__APPLE__)
#import <Foundation/Foundation.h>
#endif
#include "GPUImageUtil.h" #include "GPUImageUtil.h"
#define openLog 1
namespace Opipe {
@@ -39,7 +41,19 @@ namespace Opipe {
}
void Log(const std::string &tag, const std::string &format, ...) {
#if openLog
char buffer[10240];
va_list args;
va_start(args, format);
vsprintf(buffer, format.c_str(), args);
va_end(args);
#if defined(__APPLE__)
NSLog(@"%s: %s", tag.c_str(), buffer);
#else
__android_log_print(ANDROID_LOG_INFO, tag.c_str(), "%s", buffer);
#endif
#endif
}
/** /**

View File

@@ -20,7 +20,7 @@
/// @param texture texture description
/// @param onScreenTexture on-screen texture
/// @param frameTime frame time
-- (IOSurfaceID)bgraCameraTextureReady:(OlaShareTexture *)texture
+- (void)bgraCameraTextureReady:(OlaShareTexture *)texture
onScreenTexture:(OlaShareTexture *)onScreenTexture
frameTime:(NSTimeInterval)frameTime;
@@ -30,7 +30,7 @@
/// @param frameTime frameTime description
/// @param targetTexture targetTexture description
/// @param buffer the MTL command buffer
-- (void)externalRender:(NSTimeInterval)frameTime
+- (IOSurfaceID)externalRender:(NSTimeInterval)frameTime
targetTexture:(OlaShareTexture *)targetTexture
commandBuffer:(id<MTLCommandBuffer>)buffer;
@@ -71,6 +71,8 @@
/// @param sampleBuffer camera capture sample buffer
- (void)cameraSampleBufferArrive:(CMSampleBufferRef)sampleBuffer;
- (void)renderPixelbuffer:(CVPixelBufferRef)pixelbuffer;
- (void)addRender:(OlaCameraRender *)render;

View File

@@ -231,21 +231,23 @@ NS_INLINE size_t QAAlignSize(size_t size)
if (strongSelf.cameraDelegate && !strongSelf.isPaused) {
if (@available(iOS 11.0, *)) {
glFlush();
+[EAGLContext setCurrentContext:strongSelf.openGLContext];
[strongSelf.cameraDelegate draw:strongSelf.frameTime];
-[strongSelf.cameraDelegate externalRender:strongSelf.frameTime
-targetTexture:strongSelf.shareTexture
-commandBuffer:commandBuffer];
-[EAGLContext setCurrentContext:self.openGLContext];
-IOSurfaceID surfaceId = [strongSelf.cameraDelegate bgraCameraTextureReady:strongSelf.cameraTexture
+[strongSelf.cameraDelegate bgraCameraTextureReady:strongSelf.cameraTexture
onScreenTexture:strongSelf.shareTexture
frameTime:strongSelf.frameTime * 1000];
+IOSurfaceID surfaceId = [strongSelf.cameraDelegate externalRender:strongSelf.frameTime
+targetTexture:strongSelf.cameraTexture
+commandBuffer:commandBuffer];
if (surfaceId != -1) {
// render the returned surfaceId here
IOSurfaceRef ioSurface = IOSurfaceLookup(surfaceId);
-IOSurfaceLock(ioSurface, kIOSurfaceLockReadOnly, nil);
if (ioSurface) {
-if (self.lastIOSurfaceID != surfaceId || self.ioSurfaceTexture == nil) {
+if (strongSelf.lastIOSurfaceID != surfaceId || strongSelf.ioSurfaceTexture == nil) {
id<MTLTexture> texture;
MTLTextureDescriptor *textureDescriptor = [MTLTextureDescriptor
texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
@@ -254,18 +256,16 @@ NS_INLINE size_t QAAlignSize(size_t size)
mipmapped:NO];
textureDescriptor.storageMode = MTLStorageModeShared;
textureDescriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
-texture = [self.device newTextureWithDescriptor:textureDescriptor iosurface:ioSurface plane:0];
-self.ioSurfaceTexture = texture;
+texture = [strongSelf.device newTextureWithDescriptor:textureDescriptor iosurface:ioSurface plane:0];
+strongSelf.ioSurfaceTexture = texture;
textureDescriptor = nil;
}
-IOSurfaceUnlock(ioSurface, kIOSurfaceLockReadOnly, nil);
-CFRelease(ioSurface);
-self.lastIOSurfaceID = surfaceId;
-if (self.ioSurfaceTexture) {
+strongSelf.lastIOSurfaceID = surfaceId;
+if (strongSelf.ioSurfaceTexture) {
+IOSurfaceLock(ioSurface, kIOSurfaceLockReadOnly, nil);
[strongSelf.mtlRender renderToTexture:drawable.texture
-from:self.ioSurfaceTexture
+from:strongSelf.ioSurfaceTexture
commandBuffer:commandBuffer
textureCoordinate:strongSelf.mtlRender.noRotationBuffer];
if (drawable) {
@@ -273,6 +273,8 @@ NS_INLINE size_t QAAlignSize(size_t size)
[commandBuffer addCompletedHandler:renderCompleted];
[commandBuffer commit];
}
+IOSurfaceUnlock(ioSurface, kIOSurfaceLockReadOnly, nil);
+CFRelease(ioSurface);
return;
}
@@ -299,6 +301,37 @@ NS_INLINE size_t QAAlignSize(size_t size)
}
- (void)renderPixelbuffer:(CVPixelBufferRef)pixelbuffer
{
if (self.isPaused) {
return;
}
if (dispatch_semaphore_wait(self.cameraFrameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
{
return;
}
dispatch_semaphore_t block_camera_sema = self.cameraFrameRenderingSemaphore;
__strong OlaMTLCameraRenderView *weakSelf = self;
void (^renderCompleted)(id<MTLCommandBuffer> buffer) = ^(id<MTLCommandBuffer> buffer)
{
dispatch_semaphore_signal(block_camera_sema);
};
CFRetain(pixelbuffer);
dispatch_async(self.displayRenderQueue, ^{
if (weakSelf == nil) {
CFRelease(pixelbuffer);
return;
}
__strong OlaMTLCameraRenderView *strongSelf = weakSelf;
[strongSelf.mtlRender renderToCameraTextureWithPixelBuffer:pixelbuffer completedHandler:renderCompleted];
CFRelease(pixelbuffer);
});
}
- (void)cameraSampleBufferArrive:(CMSampleBufferRef)sampleBuffer
{
if (self.isPaused) {

View File

@@ -63,7 +63,7 @@ cc_library(
copts = select({
"//mediapipe:apple": [
"-x objective-c++",
-"-fobjc-arc", # enable reference-counting
+# "-fobjc-arc", # enable reference-counting
],
"//conditions:default": ["-std=c++17"],
}),

View File

@@ -29,8 +29,8 @@ namespace Opipe
FaceMeshBeautyRender::~FaceMeshBeautyRender()
{
_olaBeautyFilter->removeAllTargets();
if (_olaBeautyFilter)
{
_olaBeautyFilter->release();
@@ -122,6 +122,13 @@ namespace Opipe
return outputTexture;
}
void FaceMeshBeautyRender::setFacePoints(std::vector<Vec2> facePoints) {
if (_olaBeautyFilter) {
_olaBeautyFilter->setProperty("face", facePoints);
}
}
float FaceMeshBeautyRender::getSmoothing()
{
return _smoothing;
@@ -137,7 +144,7 @@ namespace Opipe
_smoothing = smoothing;
if (_olaBeautyFilter)
{
-_olaBeautyFilter->setSmoothing(smoothing);
+_olaBeautyFilter->setProperty("skin", smoothing);
}
}
@@ -146,8 +153,30 @@ namespace Opipe
_whitening = whitening;
if (_olaBeautyFilter)
{
-_olaBeautyFilter->setWhitening(whitening);
+_olaBeautyFilter->setProperty("whiten", whitening);
}
}
void FaceMeshBeautyRender::setNoseFactor(float noseFactor) {
_noseFactor = noseFactor;
if (_olaBeautyFilter) {
_olaBeautyFilter->setProperty("nose", noseFactor);
}
}
void FaceMeshBeautyRender::setFaceSlim(float slimFactor) {
_faceFactor = slimFactor;
if (_olaBeautyFilter) {
_olaBeautyFilter->setProperty("slim", slimFactor);
}
}
void FaceMeshBeautyRender::setEye(float eyeFactor) {
_eyeFactor = eyeFactor;
if (_olaBeautyFilter) {
_olaBeautyFilter->setProperty("eye", eyeFactor);
}
}
}

View File

@@ -4,7 +4,7 @@
#include "mediapipe/render/module/beauty/filters/OlaBeautyFilter.hpp"
#include "mediapipe/render/core/OlaShareTextureFilter.hpp"
#include "mediapipe/render/core/SourceImage.hpp"
#include "mediapipe/render/core/math/vec2.hpp"
namespace Opipe {
class FaceMeshBeautyRender {
public:
@@ -25,6 +25,18 @@ namespace Opipe {
/// Whitening
float getWhitening();
float getEye() {
return _eyeFactor;
}
float getFace() {
return _faceFactor;
}
float getNose() {
return _noseFactor;
}
/// Skin smoothing
/// @param smoothing smoothing strength, 0.0 - 1.0
@@ -34,12 +46,29 @@ namespace Opipe {
/// Whitening
/// @param whitening whitening strength, 0.0 - 1.0
void setWhitening(float whitening);
/// Set face landmark points (MediaPipe version)
void setFacePoints(std::vector<Vec2> facePoints);
// Eye enlargement
void setEye(float eyeFactor);
// Face slimming
void setFaceSlim(float slimFactor);
// Nose slimming
void setNoseFactor(float noseFactor);
private:
OlaBeautyFilter *_olaBeautyFilter = nullptr;
OlaShareTextureFilter *_outputFilter = nullptr;
Framebuffer *_inputFramebuffer = nullptr;
float _smoothing = 0.0;
float _whitening = 0.0;
float _noseFactor = 0.0;
float _faceFactor = 0.0;
float _eyeFactor = 0.0;
bool _isRendering = false;
Context *_context = nullptr;
SourceImage *_lutImage = nullptr;

View File

@@ -116,6 +116,20 @@ namespace Opipe
/// Whitening
virtual float getWhitening() = 0;
/// Face slimming
virtual float getSlim() = 0;
virtual float getEye() = 0;
/// Nose slimming
virtual float getNose() = 0;
virtual void setSlim(float slim) = 0;
virtual void setNose(float nose) = 0;
virtual void setEye(float eye) = 0;
/// Skin smoothing
/// @param smoothing smoothing strength, 0.0 - 1.0
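
Note (not part of the diff): how a caller might drive the new shape controls through the extended FaceMeshModule interface. How the module instance is obtained is outside this commit, so the pointer below is simply assumed to be valid; the include path is a guess based on the BUILD targets, and the values are arbitrary.

```cpp
#include "mediapipe/render/module/beauty/face_mesh_module.h"  // assumed path

// All factors follow the 0.0 - 1.0 convention used by the existing
// smoothing/whitening setters.
void applyBeautyParams(Opipe::FaceMeshModule* module) {
  module->setSmoothing(0.5f);  // skin smoothing
  module->setWhitening(0.3f);  // whitening
  module->setSlim(0.2f);       // face slimming
  module->setEye(0.2f);        // eye enlargement
  module->setNose(0.2f);       // nose narrowing

  float eye = module->getEye();  // read back, e.g. to sync UI sliders
  (void)eye;
}
```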

View File

@@ -1,5 +1,10 @@
#include "face_mesh_module_imp.h"
#include "mediapipe/render/core/Context.hpp"
#include "mediapipe/render/core/math/vec2.hpp"
#if TestTemplateFace
#include "mediapipe/render/core/CVFramebuffer.hpp"
#import <UIKit/UIKit.h>
#endif
static const char* kNumFacesInputSidePacket = "num_faces";
static const char* kLandmarksOutputStream = "multi_face_landmarks";
@@ -18,45 +23,43 @@ namespace Opipe
}
#if defined(__APPLE__)
void FaceMeshCallFrameDelegate::outputPixelbuffer(OlaGraph *graph, CVPixelBufferRef pixelbuffer,
-const std::string &streamName, int64_t timstamp)
+const std::string &streamName, int64_t timestamp)
{
_imp->currentDispatch()->runSync([&] {
IOSurfaceRef surface = CVPixelBufferGetIOSurface(pixelbuffer);
IOSurfaceID surfaceId = IOSurfaceGetID(surface);
Log("Opipe", "streamName %s timeStamp:%ld iosurfaceid:%d", streamName.c_str(), timestamp, surfaceId);
});
}
#endif
void FaceMeshCallFrameDelegate::outputPacket(OlaGraph *graph, const mediapipe::Packet &packet, const std::string &streamName) {
#if defined(__APPLE__)
NSLog(@"streamName:%@ ts:%lld 是否有人脸:%@", [NSString stringWithUTF8String:streamName.c_str()],
packet.Timestamp().Value(), @(_hasFace));
#endif
if (_imp == nullptr) {
return;
}
_imp->currentDispatch()->runSync([&] {
if (streamName == kLandmarksOutputStream) {
_last_landmark_ts = packet.Timestamp().Value();
if (_last_video_ts == _last_landmark_ts) {
// a face is present
_hasFace = true;
const auto& multi_face_landmarks = packet.Get<std::vector<::mediapipe::NormalizedLandmarkList>>();
_lastLandmark = multi_face_landmarks[0];
}
} Log("FaceMeshModule", "landmarkts:%ld", _last_landmark_ts);
if (_last_video_ts != _last_landmark_ts) { if (packet.Timestamp().Value() != _last_landmark_ts) {
_hasFace = false; _hasFace = false;
_last_landmark_ts = 0; //输出过一次的时间戳 不再输出
} }
_last_video_ts = packet.Timestamp().Value();
if (_hasFace) {
-_imp->setLandmark(_lastLandmark);
+_imp->setLandmark(_lastLandmark, packet.Timestamp().Value());
} else {
-_imp->setLandmark(_emptyLandmark);
+_imp->setLandmark(_emptyLandmark, packet.Timestamp().Value());
}
}, Opipe::Context::IOContext);
}
void FaceMeshCallFrameDelegate::outputPacket(OlaGraph *graph, const mediapipe::Packet &packet,
@@ -163,27 +166,34 @@ namespace Opipe
_dispatch->runSync([&] {
if (_render == nullptr) {
_render = new FaceMeshBeautyRender(_context);
#if TestTemplateFace
UIImage *image = [UIImage imageNamed:@"templateFace"];
_templateFace = SourceImage::create(_context, image);
#endif
}
});
}
return true;
}
-void FaceMeshModuleIMP::setLandmark(NormalizedLandmarkList landmark)
+void FaceMeshModuleIMP::setLandmark(NormalizedLandmarkList landmark, int64_t timeStamp)
{
_lastLandmark = std::move(landmark);
-// if (_lastLandmark.landmark_size() == 0) {
-//#if defined(__APPLE__)
-// NSLog(@"没有人脸");
-//#endif
-// }
-// for (int i = 0; i < _lastLandmark.landmark_size(); ++i) {
-//#if defined(__APPLE__)
-// NSLog(@"######## Set Landmark[%d]: (%f, %f, %f)", i, _lastLandmark.landmark(i).x(),
-// _lastLandmark.landmark(i).y(), _lastLandmark.landmark(i).z());
-//#endif
-// }
+if (_lastLandmark.landmark_size() == 0) {
+Log("FaceMeshModule", "没有检测到人脸");
+} else {
+// _graph->cosumeFrame();
+// _graph->closeAllInputStreams();
+Log("FaceMeshModule", "检测到人脸输出");
+}
}
void FaceMeshModuleIMP::startModule()
@@ -193,6 +203,7 @@ namespace Opipe
return;
}
_isInit = _graph->start();
_graph->setUseVideoOutput(false);
}
void FaceMeshModuleIMP::stopModule()
@@ -216,11 +227,24 @@ namespace Opipe
return;
}
Timestamp ts(timeStamp * 1000);
#if TestTemplateFace
auto *framebuffer = dynamic_cast<CVFramebuffer *>(_templateFace->getFramebuffer());
CVPixelBufferRef renderTarget = framebuffer->renderTarget;
framebuffer->lockAddress();
_graph->sendPixelBuffer(renderTarget, "input_video",
MPPPacketTypePixelBuffer,
ts);
framebuffer->unlockAddress();
#else
CVPixelBufferLockBaseAddress(pixelbuffer, 0);
_graph->sendPixelBuffer(pixelbuffer, "input_video",
MPPPacketTypePixelBuffer,
ts);
CVPixelBufferUnlockBaseAddress(pixelbuffer, 0);
#endif
}
#endif
@@ -254,18 +278,20 @@ namespace Opipe
_dispatch->runSync([&] {
// GLsync sync;
// _dispatch->runAsync([&] {
// _render->renderTexture(inputTexture);
// sync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
// glFlush();
// });
// glWaitSync(sync, 0, GL_TIMEOUT_IGNORED);
// glDeleteSync(sync);
_render->renderTexture(inputTexture);
});
textureInfo = _render->outputRenderTexture(inputTexture);
std::vector<Vec2> facePoints;
if (_lastLandmark.landmark_size() > 0) {
Log("FaceMeshModule", "检测到人脸输出");
for (int i = 0; i < _lastLandmark.landmark_size(); i++) {
facePoints.emplace_back( _lastLandmark.landmark(i).x(), _lastLandmark.landmark(i).y());
}
Log("FaceMeshModule", "检测到人脸输完毕");
}
_render->setFacePoints(facePoints);
return textureInfo;
}

View File

@@ -8,6 +8,12 @@
#include "face_mesh_module.h"
#include "face_mesh_beauty_render.h"
#define TestTemplateFace 0
#if TestTemplateFace
#include "mediapipe/render/core/SourceImage.hpp"
#endif
namespace Opipe
{
class FaceMeshModuleIMP;
@@ -32,7 +38,6 @@ namespace Opipe
private:
int64_t _last_landmark_ts = 0;
-int64_t _last_video_ts = 0;
bool _hasFace = false;
NormalizedLandmarkList _lastLandmark;
NormalizedLandmarkList _emptyLandmark;
@@ -79,7 +84,7 @@ namespace Opipe
virtual TextureInfo renderTexture(TextureInfo inputTexture) override;
-virtual void setLandmark(NormalizedLandmarkList landmark);
+virtual void setLandmark(NormalizedLandmarkList landmark, int64_t timestamp);
/// Skin smoothing
float getSmoothing() override {
@@ -91,20 +96,46 @@ namespace Opipe
return _render->getWhitening();
}
float getEye() override {
return _render->getEye();
}
float getSlim() override {
return _render->getFace();
}
float getNose() override {
return _render->getNose();
}
/// Skin smoothing
/// @param smoothing smoothing strength, 0.0 - 1.0
-void setSmoothing(float smoothing) {
+void setSmoothing(float smoothing) override {
_render->setSmoothing(smoothing);
}
/// Whitening
/// @param whitening whitening strength, 0.0 - 1.0
-void setWhitening(float whitening) {
+void setWhitening(float whitening) override {
_render->setWhitening(whitening);
}
void setEye(float eye) override {
_render->setEye(eye);
}
void setSlim(float slim) override {
_render->setFaceSlim(slim);
}
void setNose(float nose) override {
_render->setNoseFactor(nose);
}
OpipeDispatch* currentDispatch() {
return _dispatch.get();
}
private:
std::unique_ptr<OpipeDispatch> _dispatch;
std::unique_ptr<OlaGraph> _graph;
@@ -114,6 +145,10 @@ namespace Opipe
std::shared_ptr<FaceMeshCallFrameDelegate> _delegate;
FaceMeshBeautyRender *_render = nullptr;
OlaContext *_olaContext = nullptr;
#if TestTemplateFace
SourceImage *_templateFace = nullptr;
#endif
};
}
#endif

View File

@@ -34,7 +34,7 @@ cc_library(
copts = select({
"//mediapipe:apple": [
"-x objective-c++",
-"-fobjc-arc", # enable reference-counting
+# "-fobjc-arc", # enable reference-counting
],
"//conditions:default": ["-std=c++17"],
}),

View File

@@ -18,6 +18,7 @@ namespace Opipe
uniform int count;
uniform float eye;
uniform float slim;
uniform float nose;
uniform int debug;
void main() {
vec2 uv = texCoord.xy;
@@ -58,6 +59,16 @@ namespace Opipe
uv = vec2(textureCoordinateToUse.x - (delta * directionX),
textureCoordinateToUse.y - (delta * directionY));
}
else if (types[i] == 3)
{
float dist = 1.0 - d;
float delta = scale[i] * dist * nose;
float deltaScale = smoothstep(u_min[i], u_max[i], dist);
float directionX = cos(angle[i]) * deltaScale;
float directionY = sin(angle[i]) * deltaScale / (3.0 / 4.0 * aspectRatio);
uv = vec2(textureCoordinateToUse.x - (delta * directionX),
textureCoordinateToUse.y - (delta * directionY));
}
}
}
vTexCoord = uv;
@@ -214,6 +225,12 @@ namespace Opipe
return angle;
}
Vector2 FaceDistortionFilter::_positionAt(int index) {
float x = (_facePoints[index].x - 0.5) * 2.0;
float y = (_facePoints[index].y - 0.5) * 2.0;
return Vector2(x, y);
}
void FaceDistortionFilter::setUniform()
{
if (_facePoints.size() > 60)
@@ -225,15 +242,14 @@ namespace Opipe
_filterProgram->setUniformValue("aspectRatio",
height /
width);
_filterProgram->setUniformValue("eye", _eye);
_filterProgram->setUniformValue("slim", _slim);
_filterProgram->setUniformValue("nose", _nose);
// enlarge left eye
{
-Vector2 point1 = Vector2(_facePoints[75].x, _facePoints[75].y);
-Vector2 point2 = Vector2(_facePoints[79].x, _facePoints[79].y);
-Vector2 point3 = Vector2(_facePoints[65].x, _facePoints[65].y);
+Vector2 point1 = _positionAt(362);
+Vector2 point2 = _positionAt(263);
+Vector2 point3 = _positionAt(417);
Vector2 center = point1.getCenter(point2);
float distance = center.distance(point3);
addPoint(center, distance / 2, distance / 2, 0.3, 1, 0.0f, 0.0f, 1);
@@ -241,9 +257,9 @@ namespace Opipe
// enlarge right eye
{
-Vector2 point1 = Vector2(_facePoints[66].x, _facePoints[66].y);
-Vector2 point2 = Vector2(_facePoints[70].x, _facePoints[70].y);
-Vector2 point3 = Vector2(_facePoints[55].x, _facePoints[55].y);
+Vector2 point1 = _positionAt(33);
+Vector2 point2 = _positionAt(133);
+Vector2 point3 = _positionAt(193);
Vector2 center = point1.getCenter(point2);
float distance = center.distance(point3);
addPoint(center, distance / 2, distance / 2, 0.3, 1, 0.0f, 0.0f, 1);
@@ -251,10 +267,10 @@ namespace Opipe
// slim left side of face
{
-Vector2 point1 = Vector2(_facePoints[11].x, _facePoints[11].y);
-Vector2 point2 = Vector2(_facePoints[60].x, _facePoints[60].y);
-Vector2 point3 = Vector2(_facePoints[4].x, _facePoints[4].y);
-Vector2 point4 = Vector2(_facePoints[16].x, _facePoints[16].y);
+Vector2 point1 = _positionAt(136);
+Vector2 point2 = _positionAt(19);
+Vector2 point3 = _positionAt(234);
+Vector2 point4 = _positionAt(152);
float angle = getRadius(point2, point1);
addPoint(point1, point1.distance(point3), point1.distance(point4), 0.02, 2, angle,
@@ -263,10 +279,10 @@ namespace Opipe
}
// slim right side of face
{
-Vector2 point1 = Vector2(_facePoints[21].x, _facePoints[21].y);
-Vector2 point2 = Vector2(_facePoints[60].x, _facePoints[60].y);
-Vector2 point3 = Vector2(_facePoints[28].x, _facePoints[28].y);
-Vector2 point4 = Vector2(_facePoints[16].x, _facePoints[16].y);
+Vector2 point1 = _positionAt(379);
+Vector2 point2 = _positionAt(19);
+Vector2 point3 = _positionAt(454);
+Vector2 point4 = _positionAt(152);
float angle = getRadius(point2, point1);
addPoint(point1, point1.distance(point3), point1.distance(point4), 0.02, 2, angle,

View File

@@ -19,15 +19,32 @@ namespace Opipe
virtual bool proceed(float frameTime = 0.0, bool bUpdateTargets = true) override;
public:
float eye() {
return _eye;
}
float slim() {
return _slim;
}
float nose() {
return _nose;
}
void setEye(float eye)
{
_eye = eye;
-};
+}
void setSlim(float slim)
{
_slim = slim;
-};
+}
void setNose(float nose)
{
_nose = nose;
}
void setFacePoints(std::vector<Vec2> facePoints)
{
@@ -53,7 +70,9 @@ namespace Opipe
float _u_min[20];
float _u_max[20];
int _types[20];
-float _u_facePoints[212];
+float _u_facePoints[980];
Vector2 _positionAt(int index);
private:
void generateDistoritionVBO(int numX, int numY, const GLfloat *imageTexUV);
@@ -62,6 +81,7 @@ namespace Opipe
private:
float _eye = 0.0;
float _slim = 0.0;
float _nose = 0.0;
std::vector<Vec2> _facePoints; // only a single face is supported for now
GLuint vao = -1;
GLuint eao = -1;

View File

@@ -1,4 +1,5 @@
#include "OlaBeautyFilter.hpp"
#include "mediapipe/render/core/math/vec2.hpp"
namespace Opipe {
OlaBeautyFilter::OlaBeautyFilter(Context *context) : FilterGroup(context)
@@ -67,7 +68,7 @@ namespace Opipe {
_lutFilter = LUTFilter::create(context);
_unSharpMaskFilter = UnSharpMaskFilter::create(context);
_unSharpMaskFilter->addTarget(_lutFilter, 0);
_faceDistortFilter = FaceDistortionFilter::create(context);
_bilateralAdjustFilter = BilateralAdjustFilter::create(context);
addFilter(_bilateralAdjustFilter);
@@ -75,11 +76,6 @@ namespace Opipe {
_lookUpGroupFilter->addFilter(_unSharpMaskFilter);
_alphaBlendFilter = AlphaBlendFilter::create(context);
// addFilter(_lookUpGroupFilter);
// addFilter(_lutFilter);
// setTerminalFilter(_lutFilter);
_bilateralFilter = BilateralFilter::create(context);
addFilter(_bilateralFilter);
@@ -88,9 +84,8 @@ namespace Opipe {
_bilateralFilter->addTarget(_bilateralAdjustFilter, 1)->addTarget(_alphaBlendFilter, 0);
-_alphaBlendFilter->setMix(0.0);
-setTerminalFilter(_alphaBlendFilter);
+_alphaBlendFilter->setMix(0.8);
_bilateralAdjustFilter->setOpacityLimit(0.6);
_bilateralFilter->setDistanceNormalizationFactor(2.746);
@@ -99,45 +94,46 @@ namespace Opipe {
_unSharpMaskFilter->setBlurRadiusInPixel(2.0f, false);
_unSharpMaskFilter->setIntensity(1.365);
-// _bilateralFilter = BilateralFilter::create(context);
+_alphaBlendFilter->addTarget(_faceDistortFilter);
// addFilter(_bilateralFilter);
//
// _bilateralAdjustFilter = BilateralAdjustFilter::create(context);
// addFilter(_bilateralAdjustFilter);
//
// _unSharpMaskFilter = UnSharpMaskFilter::create(context);
//
// _lutFilter = LUTFilter::create(context);
// _unSharpMaskFilter->addTarget(_lutFilter, 0);
//
// _lookUpGroupFilter = FilterGroup::create(context);
// _lookUpGroupFilter->addFilter(_unSharpMaskFilter);
//
// _alphaBlendFilter = AlphaBlendFilter::create(context);
// _faceDistortFilter = FaceDistortionFilter::create(context);
//
//
// _bilateralFilter->addTarget(_bilateralAdjustFilter, 1)->
// addTarget(_alphaBlendFilter, 0);
//
// _bilateralAdjustFilter->addTarget(_lookUpGroupFilter)->
// addTarget(_alphaBlendFilter, 1)->addTarget(_faceDistortFilter);
//
// _alphaBlendFilter->setMix(0.8);
//
// _unSharpMaskFilter->setBlurRadiusInPixel(4.0f, true);
// _unSharpMaskFilter->setBlurRadiusInPixel(2.0f, false);
// _unSharpMaskFilter->setIntensity(1.365);
//
// _bilateralAdjustFilter->setOpacityLimit(0.6);
//
// _bilateralFilter->setDistanceNormalizationFactor(2.746);
// _bilateralFilter->setTexelSpacingMultiplier(2.7);
//
// setTerminalFilter(_faceDistortFilter);
-// setTerminalFilter(_faceDistortFilter);
+setTerminalFilter(_faceDistortFilter);
std::vector<Vec2> defaultFace;
registerProperty("face", defaultFace, "人脸点", [this](std::vector<Vec2> facePoints) {
_faceDistortFilter->setFacePoints(facePoints);
});
registerProperty("eye", 0.0f, "大眼 0.0 - 1.0",
[this](float eye) {
_faceDistortFilter->setEye(eye);
});
registerProperty("slim", 0.0f, "瘦脸 0.0 - 1.0",
[this](float slim) {
_faceDistortFilter->setSlim(slim);
});
registerProperty("nose", 0.0f, "瘦鼻 0.0 - 1.0",
[this](float nose) {
_faceDistortFilter->setNose(nose);
});
registerProperty("skin", 0.0f, "磨皮 0.0 - 1.0",
[this](float skin) {
if (skin == 0.0) {
_bilateralAdjustFilter->setEnable(false);
} else {
_bilateralAdjustFilter->setEnable(true);
_bilateralAdjustFilter->setOpacityLimit(skin);
}
});
registerProperty("whiten", 0.0f, "美白 0.0 - 1.0",
[this](float whiten) {
_alphaBlendFilter->setMix(whiten);
});
return true;
}
@@ -169,39 +165,4 @@ namespace Opipe {
}
}
-void OlaBeautyFilter::setSmoothing(float smoothing) {
-if (_bilateralAdjustFilter == nullptr) {
-return;
-}
-if (smoothing == 0.0) {
-_bilateralAdjustFilter->setEnable(false);
-} else {
-_bilateralAdjustFilter->setEnable(true);
-_bilateralAdjustFilter->setOpacityLimit(smoothing);
-}
-}
-float OlaBeautyFilter::getSmoothing() {
-if (_bilateralAdjustFilter) {
-return _bilateralAdjustFilter->getOpacityLimit();
-}
-return 0.0;
-}
-void OlaBeautyFilter::setWhitening(float whitening) {
-if (_alphaBlendFilter) {
-_alphaBlendFilter->setMix(whitening);
-}
-_lutFilter->setStep(whitening);
-}
-float OlaBeautyFilter::getWhitening() {
-if (_alphaBlendFilter) {
-return _alphaBlendFilter->getMix();
-}
-return 0.0;
-}
} }
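
Note (not part of the diff): with the hard-coded setters removed, everything on OlaBeautyFilter is now driven through the string-keyed properties registered in init() above, which is what FaceMeshBeautyRender does elsewhere in this commit. A condensed sketch, with arbitrary values and assumed includes:

```cpp
#include <vector>
// Assumed: OlaBeautyFilter.hpp and math/vec2.hpp from this repo are on the include path.

// beauty is assumed to be a valid OlaBeautyFilter* created via
// OlaBeautyFilter::create(context); facePoints are normalized landmarks.
void configureBeauty(Opipe::OlaBeautyFilter* beauty,
                     const std::vector<Opipe::Vec2>& facePoints) {
  beauty->setProperty("face", facePoints);  // feeds FaceDistortionFilter
  beauty->setProperty("skin", 0.5f);        // smoothing via BilateralAdjustFilter
  beauty->setProperty("whiten", 0.3f);      // whitening via AlphaBlendFilter mix
  beauty->setProperty("eye", 0.2f);         // eye enlargement
  beauty->setProperty("slim", 0.2f);        // face slimming
  beauty->setProperty("nose", 0.2f);        // nose narrowing
}
```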

View File

@@ -12,14 +12,6 @@ namespace Opipe
{
class OlaBeautyFilter : public FilterGroup
{
-public:
-float getSmoothing();
-float getWhitening();
-void setSmoothing(float smoothing);
-void setWhitening(float whitening);
public:
static OlaBeautyFilter *create(Context *context);
@@ -42,35 +34,6 @@ namespace Opipe
virtual ~OlaBeautyFilter();
void setFacePoints(std::vector<Vec2> facePoints) {
_faceDistortFilter->setFacePoints(facePoints);
}
// "大眼 0.0 - 1.0"
void setEye(float eye) {
_faceDistortFilter->setEye(eye);
}
//1.0f, "瘦脸 0.0 - 1.0",
void setSlim(float slim) {
_faceDistortFilter->setSlim(slim);
}
// "磨皮 0.0 - 1.0"
void setSkin(float skin) {
if (skin == 0.0) {
_bilateralAdjustFilter->setEnable(false);
} else {
_bilateralAdjustFilter->setEnable(true);
_bilateralAdjustFilter->setOpacityLimit(skin);
}
}
// "美白 0.0 - 1.0"
void setWhiten(float whiten) {
_alphaBlendFilter->setMix(whiten);
}
private:
BilateralFilter *_bilateralFilter = 0;
AlphaBlendFilter *_alphaBlendFilter = 0;
@@ -79,7 +42,6 @@ namespace Opipe
UnSharpMaskFilter *_unSharpMaskFilter = 0;
FaceDistortionFilter *_faceDistortFilter = 0;
FilterGroup *_lookUpGroupFilter = 0;
SourceImage *_lutImage = 0;
};
}

View File

@@ -7,7 +7,7 @@
<key>OpipeBeautyModuleExample.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
-<integer>7</integer>
+<integer>3</integer>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>

View File

@@ -0,0 +1,21 @@
{
"images" : [
{
"filename" : "templateFace.jpg",
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"scale" : "2x"
},
{
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -17,7 +17,7 @@
<rect key="frame" x="0.0" y="0.0" width="414" height="896"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
-<slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="0.5" minValue="0.0" maxValue="1" translatesAutoresizingMaskIntoConstraints="NO" id="vhS-oS-Fej">
+<slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" minValue="0.0" maxValue="1" translatesAutoresizingMaskIntoConstraints="NO" id="vhS-oS-Fej">
<rect key="frame" x="55" y="632" width="304" height="31"/>
<constraints>
<constraint firstAttribute="width" constant="300" id="YLm-AJ-oyY"/>
@@ -26,12 +26,81 @@
<action selector="beautyChanged:" destination="BYZ-38-t0r" eventType="valueChanged" id="j3u-PR-SZh"/>
</connections>
</slider>
<slider opaque="NO" tag="1" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" minValue="0.0" maxValue="1" translatesAutoresizingMaskIntoConstraints="NO" id="Qqv-lK-sGM">
<rect key="frame" x="55" y="703" width="304" height="31"/>
<constraints>
<constraint firstAttribute="width" constant="300" id="RfD-NJ-btk"/>
<constraint firstAttribute="width" relation="greaterThanOrEqual" constant="300" id="YrT-Ue-dTy"/>
</constraints>
<connections>
<action selector="beautyChanged:" destination="BYZ-38-t0r" eventType="valueChanged" id="nM1-Gf-7Zs"/>
</connections>
</slider>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="美白磨皮" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="S8B-AB-YC8">
<rect key="frame" x="172" y="601" width="70" height="21"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="瘦脸" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Xqq-mI-Nvq">
<rect key="frame" x="189.5" y="672" width="35" height="21"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="大眼" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="vgF-XJ-zVO">
<rect key="frame" x="189.5" y="743" width="35" height="21"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="整形-缩鼻翼" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Mom-9E-xEB">
<rect key="frame" x="159.5" y="814" width="95" height="20.5"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
</label>
<slider opaque="NO" tag="2" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" minValue="0.0" maxValue="1" translatesAutoresizingMaskIntoConstraints="NO" id="wEp-us-rWn">
<rect key="frame" x="55" y="774" width="304" height="31"/>
<constraints>
<constraint firstAttribute="width" relation="greaterThanOrEqual" constant="300" id="SU3-lS-UVj"/>
<constraint firstAttribute="width" constant="300" id="d1T-V2-Y4S"/>
</constraints>
<connections>
<action selector="beautyChanged:" destination="BYZ-38-t0r" eventType="valueChanged" id="Kax-jU-pOr"/>
</connections>
</slider>
<slider opaque="NO" tag="3" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" minValue="0.0" maxValue="1" translatesAutoresizingMaskIntoConstraints="NO" id="axw-O5-vck">
<rect key="frame" x="55" y="844.5" width="304" height="31"/>
<constraints>
<constraint firstAttribute="width" relation="lessThanOrEqual" constant="300" id="cWo-IZ-VC0"/>
<constraint firstAttribute="width" relation="greaterThanOrEqual" constant="300" id="goO-a8-B8f"/>
<constraint firstAttribute="width" constant="300" id="tXo-vs-G4j"/>
</constraints>
<connections>
<action selector="beautyChanged:" destination="BYZ-38-t0r" eventType="valueChanged" id="WZa-J4-ckO"/>
</connections>
</slider>
</subviews>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
<color key="backgroundColor" systemColor="systemBackgroundColor"/>
<constraints>
<constraint firstItem="Mom-9E-xEB" firstAttribute="top" secondItem="wEp-us-rWn" secondAttribute="bottom" constant="10" id="AJK-dn-fkm"/>
<constraint firstItem="vgF-XJ-zVO" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="DZZ-ez-8TZ"/>
<constraint firstItem="vhS-oS-Fej" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="JJ0-gj-buS"/> <constraint firstItem="vhS-oS-Fej" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="JJ0-gj-buS"/>
<constraint firstItem="wEp-us-rWn" firstAttribute="top" secondItem="vgF-XJ-zVO" secondAttribute="bottom" constant="10" id="NHk-jR-ewD"/>
<constraint firstItem="vhS-oS-Fej" firstAttribute="top" secondItem="S8B-AB-YC8" secondAttribute="bottom" constant="10" id="PL7-5c-jQt"/>
<constraint firstItem="Mom-9E-xEB" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="TYW-Gr-uFp"/>
<constraint firstItem="vgF-XJ-zVO" firstAttribute="top" secondItem="Qqv-lK-sGM" secondAttribute="bottom" constant="10" id="VIL-NW-R20"/>
<constraint firstItem="axw-O5-vck" firstAttribute="top" secondItem="Mom-9E-xEB" secondAttribute="bottom" constant="10" id="Vcm-Ek-dn6"/>
<constraint firstItem="6Tk-OE-BBY" firstAttribute="bottom" secondItem="vhS-oS-Fej" secondAttribute="bottom" constant="200" id="cTV-Ue-aqY"/> <constraint firstItem="6Tk-OE-BBY" firstAttribute="bottom" secondItem="vhS-oS-Fej" secondAttribute="bottom" constant="200" id="cTV-Ue-aqY"/>
<constraint firstItem="Xqq-mI-Nvq" firstAttribute="top" secondItem="vhS-oS-Fej" secondAttribute="bottom" constant="10" id="d5V-Bk-AdN"/>
<constraint firstItem="axw-O5-vck" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="dS4-8m-TsB"/>
<constraint firstItem="Xqq-mI-Nvq" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="elq-CK-DpX"/>
<constraint firstItem="S8B-AB-YC8" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="frG-6h-1EA"/>
<constraint firstItem="Qqv-lK-sGM" firstAttribute="top" secondItem="Xqq-mI-Nvq" secondAttribute="bottom" constant="10" id="juk-TL-vmD"/>
<constraint firstItem="wEp-us-rWn" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="of9-0R-JYD"/>
<constraint firstItem="Qqv-lK-sGM" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="vqt-Eo-Zus"/>
</constraints>
</view>
<navigationItem key="navigationItem" id="95p-Qv-uS6"/>

View File

@@ -64,6 +64,7 @@ AVCaptureAudioDataOutputSampleBufferDelegate> {
}
[self setupSession];
[[OlaFaceUnity sharedInstance] resume];
}
- (void)viewDidLayoutSubviews
@@ -87,6 +88,8 @@ AVCaptureAudioDataOutputSampleBufferDelegate> {
[super viewWillAppear:animated];
[self startCapture];
[[OlaFaceUnity sharedInstance] resume];
[OlaFaceUnity sharedInstance].whiten = 0.0;
[OlaFaceUnity sharedInstance].smooth = 0.0;
}
- (void)setupSession {
@@ -254,29 +257,26 @@ AVCaptureAudioDataOutputSampleBufferDelegate> {
}
}
-- (IOSurfaceID)bgraCameraTextureReady:(OlaShareTexture *)texture
+- (void)bgraCameraTextureReady:(OlaShareTexture *)texture
onScreenTexture:(OlaShareTexture *)onScreenTexture
frameTime:(NSTimeInterval)frameTime
{
-[[OlaFaceUnity sharedInstance] processVideoFrame:onScreenTexture.renderTarget timeStamp:frameTime];
+[[OlaFaceUnity sharedInstance] processVideoFrame:texture.renderTarget timeStamp:frameTime];
-FaceTextureInfo inputTexture;
-inputTexture.width = onScreenTexture.size.width;
-inputTexture.height = onScreenTexture.size.height;
-inputTexture.textureId = onScreenTexture.openGLTexture;
-inputTexture.ioSurfaceId = onScreenTexture.surfaceID;
-inputTexture.frameTime = frameTime;
-FaceTextureInfo result = [[OlaFaceUnity sharedInstance] render:inputTexture];
-NSLog(@"result ioSurfaceId:%d", result.ioSurfaceId);
-return result.ioSurfaceId;
}
-- (void)externalRender:(NSTimeInterval)frameTime
+- (IOSurfaceID)externalRender:(NSTimeInterval)frameTime
targetTexture:(OlaShareTexture *)targetTexture
commandBuffer:(id<MTLCommandBuffer>)buffer
{
+FaceTextureInfo inputTexture;
+inputTexture.width = targetTexture.size.width;
+inputTexture.height = targetTexture.size.height;
+inputTexture.textureId = targetTexture.openGLTexture;
+inputTexture.ioSurfaceId = targetTexture.surfaceID;
+inputTexture.frameTime = frameTime;
+FaceTextureInfo result = [[OlaFaceUnity sharedInstance] render:inputTexture];
+return result.ioSurfaceId;
}
- (void)yuvTextureReady:(OlaShareTexture *)yTexture uvTexture:(OlaShareTexture *)uvTexture
@@ -291,8 +291,16 @@ AVCaptureAudioDataOutputSampleBufferDelegate> {
- (IBAction)beautyChanged:(UISlider *)sender
{
if (sender.tag == 0) {
[OlaFaceUnity sharedInstance].whiten = sender.value;
[OlaFaceUnity sharedInstance].smooth = sender.value;
} else if (sender.tag == 1) {
[OlaFaceUnity sharedInstance].slim = sender.value;
} else if (sender.tag == 2) {
[OlaFaceUnity sharedInstance].eyeFactor = sender.value;
} else if (sender.tag == 3) {
[OlaFaceUnity sharedInstance].nose = sender.value;
}
} }
@end @end
Binary file not shown (new file, 109 KiB).
@@ -28,7 +28,8 @@ objc_library(
         "@ios_opencv//:OpencvFramework",
     ],
     data = [
-        "//mediapipe/graphs/face_mesh:face_mesh_mobile_gpu.binarypb",
+        # "//mediapipe/graphs/face_mesh:face_mesh_mobile_gpu.binarypb",
+        "//mediapipe/graphs/face_mesh:face_mesh_mobile_landmark_gpu.binarypb",
         "//mediapipe/modules/face_detection:face_detection_short_range.tflite",
         "//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite",
         "//mediapipe/render/module/beauty:whiten.png",
@@ -36,7 +37,7 @@ objc_library(
     copts = select({
         "//mediapipe:apple": [
             "-x objective-c++",
-            "-fobjc-arc",  # enable reference-counting
+            # "-fobjc-arc",  # enable reference-counting
         ],
         "//conditions:default": [],
     }),
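Note: with "-fobjc-arc" commented out above, the Objective-C++ sources in this objc_library now compile under manual reference counting rather than ARC. A minimal sketch of what that implies for any Objective-C class owned by this target (the class and ivar below are hypothetical, for illustration only):

// Hypothetical example class; under MRC every retained ivar must be released by hand.
@interface BeautyResourceHolder : NSObject
@property (nonatomic, retain) NSData *graphData;  // 'retain' rather than ARC's 'strong'
@end

@implementation BeautyResourceHolder
- (void)dealloc
{
    [_graphData release];  // manual release of the retained ivar
    [super dealloc];       // required once ARC is disabled
}
@end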
@@ -1,29 +1,29 @@
-<Scheme version="1.3" LastUpgradeVersion="1000">
+<Scheme LastUpgradeVersion="1000" version="1.3">
-  <BuildAction buildImplicitDependencies="YES" parallelizeBuildables="YES">
+  <BuildAction parallelizeBuildables="YES" buildImplicitDependencies="YES">
     <BuildActionEntries>
-      <BuildActionEntry buildForTesting="YES" buildForRunning="YES" buildForProfiling="YES" buildForAnalyzing="YES" buildForArchiving="YES">
+      <BuildActionEntry buildForAnalyzing="YES" buildForTesting="YES" buildForRunning="YES" buildForArchiving="YES" buildForProfiling="YES">
-        <BuildableReference BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BuildableIdentifier="primary" BlueprintName="OlaFaceUnityFramework" BuildableName="OlaFaceUnityFramework.framework" ReferencedContainer="container:FaceUnityFramework.xcodeproj"></BuildableReference>
+        <BuildableReference ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="OlaFaceUnityFramework" BuildableIdentifier="primary" BuildableName="OlaFaceUnityFramework.framework" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000"></BuildableReference>
       </BuildActionEntry>
     </BuildActionEntries>
   </BuildAction>
-  <TestAction buildConfiguration="__TulsiTestRunner_Debug" shouldUseLaunchSchemeArgsEnv="YES" customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB">
+  <TestAction customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" buildConfiguration="__TulsiTestRunner_Debug" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" shouldUseLaunchSchemeArgsEnv="YES" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB">
     <Testables></Testables>
     <BuildableProductRunnable runnableDebuggingMode="0">
-      <BuildableReference BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BuildableName="OlaFaceUnityFramework.framework" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="OlaFaceUnityFramework"></BuildableReference>
+      <BuildableReference ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableName="OlaFaceUnityFramework.framework" BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BlueprintName="OlaFaceUnityFramework"></BuildableReference>
     </BuildableProductRunnable>
   </TestAction>
-  <LaunchAction customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" debugServiceExtension="internal" allowLocationSimulation="YES" useCustomWorkingDirectory="NO" debugDocumentVersioning="YES" buildConfiguration="Debug" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" launchStyle="0" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" ignoresPersistentStateOnLaunch="NO">
+  <LaunchAction customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" ignoresPersistentStateOnLaunch="NO" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" allowLocationSimulation="YES" useCustomWorkingDirectory="NO" debugDocumentVersioning="YES" debugServiceExtension="internal" launchStyle="0" buildConfiguration="Debug">
     <EnvironmentVariables></EnvironmentVariables>
     <BuildableProductRunnable runnableDebuggingMode="0">
-      <BuildableReference BuildableName="OlaFaceUnityFramework.framework" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary" BlueprintName="OlaFaceUnityFramework" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000"></BuildableReference>
+      <BuildableReference ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="OlaFaceUnityFramework" BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BuildableName="OlaFaceUnityFramework.framework"></BuildableReference>
     </BuildableProductRunnable>
   </LaunchAction>
-  <ProfileAction debugDocumentVersioning="YES" buildConfiguration="__TulsiTestRunner_Release" shouldUseLaunchSchemeArgsEnv="YES" useCustomWorkingDirectory="NO">
+  <ProfileAction buildConfiguration="__TulsiTestRunner_Release" debugDocumentVersioning="YES" useCustomWorkingDirectory="NO" shouldUseLaunchSchemeArgsEnv="YES">
     <BuildableProductRunnable runnableDebuggingMode="0">
-      <BuildableReference BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BuildableName="OlaFaceUnityFramework.framework" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="OlaFaceUnityFramework" BuildableIdentifier="primary"></BuildableReference>
+      <BuildableReference BuildableName="OlaFaceUnityFramework.framework" BlueprintName="OlaFaceUnityFramework" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000"></BuildableReference>
     </BuildableProductRunnable>
   </ProfileAction>
   <AnalyzeAction buildConfiguration="Debug"></AnalyzeAction>
-  <ArchiveAction buildConfiguration="Release" revealArchiveInOrganizer="YES"></ArchiveAction>
+  <ArchiveAction revealArchiveInOrganizer="YES" buildConfiguration="Release"></ArchiveAction>
 </Scheme>
@@ -2,28 +2,28 @@
 <Scheme version="1.3" LastUpgradeVersion="1000">
   <BuildAction parallelizeBuildables="YES" buildImplicitDependencies="YES">
     <BuildActionEntries>
-      <BuildActionEntry buildForAnalyzing="YES" buildForTesting="YES" buildForProfiling="YES" buildForRunning="YES" buildForArchiving="YES">
+      <BuildActionEntry buildForProfiling="YES" buildForArchiving="YES" buildForTesting="YES" buildForRunning="YES" buildForAnalyzing="YES">
-        <BuildableReference BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" BlueprintIdentifier="F2FE34CED4660C9200000000" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a"></BuildableReference>
+        <BuildableReference BuildableIdentifier="primary" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a"></BuildableReference>
       </BuildActionEntry>
     </BuildActionEntries>
   </BuildAction>
-  <TestAction selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" buildConfiguration="__TulsiTestRunner_Debug" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" shouldUseLaunchSchemeArgsEnv="YES">
+  <TestAction buildConfiguration="__TulsiTestRunner_Debug" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" shouldUseLaunchSchemeArgsEnv="YES" customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB">
     <Testables></Testables>
     <BuildableProductRunnable runnableDebuggingMode="0">
-      <BuildableReference BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BuildableIdentifier="primary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary"></BuildableReference>
+      <BuildableReference BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary"></BuildableReference>
     </BuildableProductRunnable>
   </TestAction>
-  <LaunchAction launchStyle="0" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" ignoresPersistentStateOnLaunch="NO" debugDocumentVersioning="YES" useCustomWorkingDirectory="NO" debugServiceExtension="internal" customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" allowLocationSimulation="YES" buildConfiguration="Debug" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB">
+  <LaunchAction customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" launchStyle="0" debugServiceExtension="internal" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" allowLocationSimulation="YES" debugDocumentVersioning="YES" buildConfiguration="Debug" useCustomWorkingDirectory="NO" ignoresPersistentStateOnLaunch="NO">
     <EnvironmentVariables></EnvironmentVariables>
     <MacroExpansion>
-      <BuildableReference BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CED4660C9200000000"></BuildableReference>
+      <BuildableReference BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BuildableIdentifier="primary" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintIdentifier="F2FE34CED4660C9200000000"></BuildableReference>
     </MacroExpansion>
   </LaunchAction>
-  <ProfileAction shouldUseLaunchSchemeArgsEnv="YES" useCustomWorkingDirectory="NO" debugDocumentVersioning="YES" buildConfiguration="__TulsiTestRunner_Release">
+  <ProfileAction buildConfiguration="__TulsiTestRunner_Release" shouldUseLaunchSchemeArgsEnv="YES" useCustomWorkingDirectory="NO" debugDocumentVersioning="YES">
     <MacroExpansion>
-      <BuildableReference ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BuildableIdentifier="primary"></BuildableReference>
+      <BuildableReference BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableIdentifier="primary"></BuildableReference>
     </MacroExpansion>
   </ProfileAction>
   <AnalyzeAction buildConfiguration="Debug"></AnalyzeAction>
-  <ArchiveAction buildConfiguration="Release" revealArchiveInOrganizer="YES"></ArchiveAction>
+  <ArchiveAction revealArchiveInOrganizer="YES" buildConfiguration="Release"></ArchiveAction>
 </Scheme>
@@ -16,6 +16,9 @@ typedef struct {
 @property (nonatomic) CGFloat whiten;
 @property (nonatomic) CGFloat smooth;
+@property (nonatomic) CGFloat slim;
+@property (nonatomic) CGFloat nose;
+@property (nonatomic) CGFloat eyeFactor;
 + (instancetype)sharedInstance;
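For reference, a minimal usage sketch of the expanded beauty API from client code, mirroring how the demo view controller above drives it; the specific values are illustrative assumptions, since valid parameter ranges are not documented in this change:

OlaFaceUnity *engine = [OlaFaceUnity sharedInstance];
engine.whiten    = 0.3;  // skin whitening
engine.smooth    = 0.5;  // skin smoothing
engine.slim      = 0.2;  // face slimming
engine.eyeFactor = 0.2;  // eye enlargement
engine.nose      = 0.1;  // nose reshaping
[engine resume];         // resume the underlying face module before frames arrive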
@@ -30,7 +30,7 @@
 {
     _face_module = Opipe::FaceMeshModule::create();
     NSBundle *bundle = [NSBundle bundleForClass:[self class]];
-    NSURL* graphURL = [bundle URLForResource:@"face_mesh_mobile_gpu" withExtension:@"binarypb"];
+    NSURL* graphURL = [bundle URLForResource:@"face_mesh_mobile_landmark_gpu" withExtension:@"binarypb"];
     NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:nil];
     if (data) {
         _face_module->init(nullptr, (void *)data.bytes, data.length);
@@ -113,6 +113,36 @@
     _face_module->setSmoothing(smooth);
 }
+- (CGFloat)slim
+{
+    return _face_module->getSlim();
+}
+- (void)setSlim:(CGFloat)slim
+{
+    _face_module->setSlim(slim);
+}
+- (CGFloat)eyeFactor
+{
+    return _face_module->getEye();
+}
+- (void)setEyeFactor:(CGFloat)eyeFactor
+{
+    _face_module->setEye(eyeFactor);
+}
+- (CGFloat)nose
+{
+    return _face_module->getNose();
+}
+- (void)setNose:(CGFloat)nose
+{
+    _face_module->setNose(nose);
+}
 - (void)resume
 {
     if (!_face_module) {
BIN mediapipe/render/module/beauty/whiten.png (Normal file → Executable file): binary contents not shown; size reduced from 331 KiB to 157 KiB.
@@ -42,8 +42,8 @@ namespace Opipe
             pixelBuffer = packet.Get<mediapipe::Image>().GetCVPixelBufferRef();
             graph->_delegate.lock()->outputPixelbuffer(graph, pixelBuffer, streamName, packet.Timestamp().Value());
+#endif
         }
-#endif
     }
@@ -120,7 +120,7 @@ namespace Opipe
             }
         }
         status = _graph->StartRun(_inputSidePackets, _streamHeaders);
-        NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
+        // NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
         if (!status.ok())
         {
             return status;
@@ -132,14 +132,14 @@ namespace Opipe
                              const std::string &streamName)
     {
         absl::Status status = _graph->AddPacketToInputStream(streamName, packet);
-        NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
+        // NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
         return status.ok();
     }
     bool OlaGraph::movePacket(mediapipe::Packet &&packet, const std::string &streamName)
     {
         absl::Status status = _graph->AddPacketToInputStream(streamName, std::move(packet));
-        NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
+        // NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
         return status.ok();
     }
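The three NSLog calls above are commented out rather than deleted, which silences per-frame status logging in the packet-pushing paths. A possible alternative, sketched here as a hypothetical helper rather than anything in this change, keeps failures visible while staying quiet on the hot path:

// Hypothetical helper (not in this diff): log an absl::Status only when it reports a failure.
static inline void LogStatusIfError(const absl::Status &status)
{
    if (!status.ok()) {
        NSLog(@"Opipe graph error: %s", status.ToString().c_str());
    }
}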
@@ -36,7 +36,7 @@ namespace Opipe
 #if defined(__APPLE__)
         virtual void outputPixelbuffer(OlaGraph *graph, CVPixelBufferRef pixelbuffer,
                                        const std::string &streamName,
-                                       int64_t timstamp) = 0;
+                                       int64_t timestamp) = 0;
 #endif
@@ -170,8 +170,17 @@ namespace Opipe
         /// Waits for the graph to become idle.
         bool waitUntilIdle();
+        void setUseVideoOutput(bool useVideoOutput) {
+            _useVideoOutput = useVideoOutput;
+        }
+        bool useVideoOutput() {
+            return _useVideoOutput;
+        }
         std::weak_ptr<MPPGraphDelegate> _delegate;
         std::atomic<int32_t> _framesInFlight = 0;
+        std::atomic<int32_t> _retryCount = 0;
     private:
         std::unique_ptr<mediapipe::CalculatorGraph> _graph;
@@ -189,6 +198,7 @@ namespace Opipe
         int64 _frameNumber;
         bool _started;
+        bool _useVideoOutput = true;
         absl::Status performStart();