适配所有的FaceMesh关键点 到脸部整形 滤镜里

This commit is contained in:
Wang.Renzhu 2022-08-01 19:55:29 +08:00
parent 49e285db9c
commit a3a7ab7f56
37 changed files with 2240 additions and 8832 deletions

View File

@ -67,3 +67,10 @@ mediapipe_binary_graph(
output_name = "face_mesh_mobile_gpu.binarypb",
deps = [":mobile_calculators"],
)
# Compiles the landmark-enabled face-mesh mobile GPU graph
# (face_mesh_mobile_landmark.pbtxt) into a binary .binarypb for runtime loading.
mediapipe_binary_graph(
name = "face_mesh_mobile_landmark_gpu_binary_graph",
graph = "face_mesh_mobile_landmark.pbtxt",
output_name = "face_mesh_mobile_landmark_gpu.binarypb",
deps = [":mobile_calculators"],
)

View File

@ -1,5 +1,5 @@
# MediaPipe graph that performs face mesh with TensorFlow Lite on GPU.
max_queue_size: 2
# GPU buffer. (GpuBuffer)
input_stream: "input_video"

View File

@ -0,0 +1,69 @@
# MediaPipe graph that performs face mesh with TensorFlow Lite on GPU.
# GPU buffer. (GpuBuffer)
input_stream: "input_video"
# Max number of faces to detect/process. (int)
input_side_packet: "num_faces"
# Output image with rendered results. (GpuBuffer)
output_stream: "output_video"
# Collection of detected/processed faces, each represented as a list of
# landmarks. (std::vector<NormalizedLandmarkList>)
output_stream: "multi_face_landmarks"
# Disabled detections output; re-enable for debugging raw face detections.
# output_stream: "face_detections"
# Throttles the images flowing downstream for flow control. It passes through
# the very first incoming image unaltered, and waits for downstream nodes
# (calculators and subgraphs) in the graph to finish their tasks before it
# passes through another image. All images that come in while waiting are
# dropped, limiting the number of in-flight images in most part of the graph to
# 1. This prevents the downstream nodes from queuing up incoming images and data
# excessively, which leads to increased latency and memory usage, unwanted in
# real-time mobile applications. It also eliminates unnecessarily computation,
# e.g., the output produced by a node may get dropped downstream if the
# subsequent nodes are still busy processing previous inputs.
node {
calculator: "FlowLimiterCalculator"
input_stream: "input_video"
input_stream: "FINISHED:output_video"
input_stream_info: {
tag_index: "FINISHED"
back_edge: true
}
output_stream: "throttled_input_video"
}
# Defines side packets for further use in the graph.
# with_attention=true selects the attention-refined landmark model.
node {
calculator: "ConstantSidePacketCalculator"
output_side_packet: "PACKET:with_attention"
node_options: {
[type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
packet { bool_value: true }
}
}
}
# Subgraph that detects faces and corresponding landmarks.
node {
calculator: "FaceLandmarkFrontGpu"
input_stream: "IMAGE:throttled_input_video"
input_side_packet: "NUM_FACES:num_faces"
input_side_packet: "WITH_ATTENTION:with_attention"
output_stream: "LANDMARKS:multi_face_landmarks"
output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks"
output_stream: "DETECTIONS:face_detections"
output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections"
}
# Subgraph that renders face-landmark annotation onto the input image
# (draws the landmark overlay; used for debugging).
node {
calculator: "FaceRendererGpu"
input_stream: "IMAGE:throttled_input_video"
input_stream: "LANDMARKS:multi_face_landmarks"
input_stream: "NORM_RECTS:face_rects_from_landmarks"
input_stream: "DETECTIONS:face_detections"
output_stream: "IMAGE:output_video"
}

View File

@ -63,6 +63,13 @@ cc_library(
"//mediapipe:apple": [],
"//conditions:default": [],
}),
copts = select({
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
],
"//conditions:default": [],
}),
)
cc_library(

View File

@ -341,8 +341,8 @@ bool Filter::proceed(float frameTime, bool bUpdateTargets/* = true*/) {
CHECK_GL(glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0));
filter_externDraw();
_framebuffer->inactive();
Log("Filter", "%s渲染完毕准备开始Unlock Framebuffer:%s", typeid(*this).name(),
_framebuffer->_hashCode.c_str());
// Log("Filter", "%s渲染完毕准备开始Unlock Framebuffer:%s", typeid(*this).name(),
// _framebuffer->_hashCode.c_str());
#if DEBUG
_framebuffer->unlock(typeid(*this).name());
#else
@ -502,6 +502,43 @@ void Filter::update(float frameTime) {
_framebuffer = 0;
}
bool Filter::getProperty(const std::string& name, std::vector<Vec2>& retValue) {
Property* property = _getProperty(name);
if (!property) return false;
retValue = ((Vec2ArrayProperty*)property)->value;
return true;
}
bool Filter::registerProperty(const std::string& name,
std::vector<Vec2> defaultValue,
const std::string& comment/* = ""*/,
std::function<void(std::vector<Vec2>&)> setCallback/* = 0*/) {
if (hasProperty(name)) return false;
Vec2ArrayProperty property;
property.type = "vec2Array";
property.value = defaultValue;
property.comment = comment;
property.setCallback = setCallback;
_vec2ArrayProperties[name] = property;
return true;
}
// Sets the value of an existing vec2-array property and fires its
// change callback (if registered).
// Returns false when the property is missing or is not of type "vec2Array".
bool Filter::setProperty(const std::string& name, std::vector<Vec2> value) {
    Property* rawProperty = _getProperty(name);
    if (!rawProperty) {
        Log("WARNING", "Filter::setProperty invalid property %s", name.c_str());
        return false;
    } else if (rawProperty->type != "vec2Array") {
        // Fixed diagnostic: the old message claimed the ACTUAL type was the
        // expected one; report both the expected type and what was found.
        Log("WARNING", "Filter::setProperty expected a vec2Array property, but %s has type %s",
            name.c_str(), rawProperty->type.c_str());
        return false;
    }
    Vec2ArrayProperty* property = ((Vec2ArrayProperty *)rawProperty);
    property->value = value;
    if (property->setCallback)
        property->setCallback(value);
    return true;
}
bool Filter::registerProperty(const std::string& name, int defaultValue, const std::string& comment/* = ""*/, std::function<void(int&)> setCallback/* = 0*/) {
if (hasProperty(name)) return false;
IntProperty property;

View File

@ -122,9 +122,22 @@ public:
virtual bool registerProperty(const std::string& name, int defaultValue, const std::string& comment = "", std::function<void(int&)> setCallback = 0);
virtual bool registerProperty(const std::string& name, float defaultValue, const std::string& comment = "", std::function<void(float&)> setCallback = 0);
virtual bool registerProperty(const std::string& name, const std::string& defaultValue, const std::string& comment = "", std::function<void(std::string&)> setCallback = 0);
bool registerProperty(const std::string& name,
std::vector<Vec2> defaultValue,
const std::string& comment = "",
std::function<void(std::vector<Vec2>&)> setCallback = 0);
bool registerProperty(const std::string& name,
Vec2 defaultValue,
const std::string& comment = "",
std::function<void(Vec2&)> setCallback = 0);
bool setProperty(const std::string& name, Vec2 value);
bool setProperty(const std::string& name, int value);
bool setProperty(const std::string& name, float value);
bool setProperty(const std::string& name, std::string value);
bool setProperty(const std::string& name, std::vector<Vec2> retValue);
bool getProperty(const std::string& name, std::vector<Vec2>& retValue);
bool getProperty(const std::string& name, int& retValue);
bool getProperty(const std::string& name, float& retValue);
bool getProperty(const std::string& name, std::string& retValue);
@ -223,8 +236,27 @@ protected:
std::string type;
std::string comment;
};
struct Vec2ArrayProperty : Property {
std::vector<Vec2> value;
std::function<void(std::vector<Vec2>&)> setCallback;
};
std::map<std::string, Vec2ArrayProperty> _vec2ArrayProperties;
struct Vec2Property : Property {
Vec2 value;
std::function<void(Vec2&)> setCallback;
};
virtual Property* _getProperty(const std::string& name);
std::map<std::string, Vec2Property> _vec2Properties;
struct Vec3Property : Property {
Vec3 value;
std::function<void(Vec3&)> setCallback;
};
struct IntProperty : Property {
int value;
std::function<void(int&)> setCallback;

View File

@ -136,16 +136,16 @@ namespace Opipe {
void Framebuffer::lock(std::string lockKey) {
if (lockKey == "Unknow") {
Log("Framebuffer LOCK", "未知锁 【hasCode :%s】", _hashCode.c_str());
// Log("Framebuffer LOCK", "未知锁 【hasCode :%s】", _hashCode.c_str());
} else if (lockKey != _lockKey) {
Log("Framebuffer LOCK", "Key变更:%s 【hasCode :%s】", lockKey.c_str(), _hashCode.c_str());
// Log("Framebuffer LOCK", "Key变更:%s 【hasCode :%s】", lockKey.c_str(), _hashCode.c_str());
}
_lockKey = lockKey;
_framebufferRetainCount++;
Log("Framebuffer LOCK", "lock retainCount == :%d lockKey:%s 【framebufferCode:%s】",
_framebufferRetainCount,
lockKey.c_str(), _hashCode.c_str());
// Log("Framebuffer LOCK", "lock retainCount == :%d lockKey:%s 【framebufferCode:%s】",
// _framebufferRetainCount,
// lockKey.c_str(), _hashCode.c_str());
}
void Framebuffer::unlock(std::string lockKey) {
@ -156,16 +156,16 @@ namespace Opipe {
}
if (lockKey != _lockKey) {
Log("Framebuffer UNLOCK", "可能是多次Lock后Unlock retainCount:%d lockKey:%s 【framebufferCode:%s】",
_framebufferRetainCount,
lockKey.c_str(),
_hashCode.c_str());
// Log("Framebuffer UNLOCK", "可能是多次Lock后Unlock retainCount:%d lockKey:%s 【framebufferCode:%s】",
// _framebufferRetainCount,
// lockKey.c_str(),
// _hashCode.c_str());
}
Log("Framebuffer UNLOCK", "unlock retainCount == :%d lockKey:%s 【framebufferCode:%s】"
, _framebufferRetainCount,
lockKey.c_str(),
_hashCode.c_str());
// Log("Framebuffer UNLOCK", "unlock retainCount == :%d lockKey:%s 【framebufferCode:%s】"
// , _framebufferRetainCount,
// lockKey.c_str(),
// _hashCode.c_str());
}
void Framebuffer::resetRetainCount() {

View File

@ -88,16 +88,16 @@ Framebuffer* FramebufferCache::fetchFramebuffer(Context *context,
forceCleanFramebuffer(framebuffer);
framebuffer = 0;
} else if (framebuffer->framebufferRetainCount() == 0 && !framebuffer->isDealloc) {
Log("Framebuffer 【命中缓存】", "hashcode:%s count:%d",
framebufferHashCodeKey.first.c_str(),
framebuffer->framebufferRetainCount());
// Log("Framebuffer 【命中缓存】", "hashcode:%s count:%d",
// framebufferHashCodeKey.first.c_str(),
// framebuffer->framebufferRetainCount());
return framebuffer;
}
}
}
Log("Framebuffer 所有缓存【未命中】", "hashcode:%s count:%d",
lookupHash.c_str(),
matchFramebuffersHashCode.size());
// Log("Framebuffer 所有缓存【未命中】", "hashcode:%s count:%d",
// lookupHash.c_str(),
// matchFramebuffersHashCode.size());
// 如果都被占用了 或者找不到对应的Framebuffer 则需要创建一个新的
if (useTextureCache) {

View File

@ -15,10 +15,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if defined(__APPLE__)
#import <Foundation/Foundation.h>
#endif
#include "GPUImageUtil.h"
#define openLog 1
namespace Opipe {
@ -39,7 +41,19 @@ namespace Opipe {
}
// Formats |format| with the trailing varargs and emits the result to the
// platform logger: NSLog on Apple, logcat (__android_log_print) elsewhere.
// Entirely compiled out when the openLog macro is 0.
// NOTE(review): va_start on a std::string parameter is conditionally
// supported / undefined behavior in standard C++; it works on common ABIs
// but a `const char *format` overload would be safer — confirm with the
// project's supported toolchains.
void Log(const std::string &tag, const std::string &format, ...) {
#if openLog
    char buffer[10240];
    va_list args;
    va_start(args, format);
    // vsnprintf bounds the write to the buffer size; the previous vsprintf
    // overflowed the stack buffer for messages longer than 10240 bytes.
    vsnprintf(buffer, sizeof(buffer), format.c_str(), args);
    va_end(args);
#if defined(__APPLE__)
    NSLog(@"%s: %s", tag.c_str(), buffer);
#else
    __android_log_print(ANDROID_LOG_INFO, tag.c_str(), "%s", buffer);
#endif
#endif
}
/**

View File

@ -20,7 +20,7 @@
/// @param texture texture description
/// @param onScreenTexture 上屏纹理
/// @param frameTime 帧时间
- (IOSurfaceID)bgraCameraTextureReady:(OlaShareTexture *)texture
- (void)bgraCameraTextureReady:(OlaShareTexture *)texture
onScreenTexture:(OlaShareTexture *)onScreenTexture
frameTime:(NSTimeInterval)frameTime;
@ -30,7 +30,7 @@
/// @param frameTime frameTime description
/// @param targetTexture targetTexture description
/// @param buffer MTL的CommandBuffer
- (void)externalRender:(NSTimeInterval)frameTime
- (IOSurfaceID)externalRender:(NSTimeInterval)frameTime
targetTexture:(OlaShareTexture *)targetTexture
commandBuffer:(id<MTLCommandBuffer>)buffer;
@ -71,6 +71,8 @@
/// @param sampleBuffer 相机采集流
- (void)cameraSampleBufferArrive:(CMSampleBufferRef)sampleBuffer;
- (void)renderPixelbuffer:(CVPixelBufferRef)pixelbuffer;
- (void)addRender:(OlaCameraRender *)render;

View File

@ -231,21 +231,23 @@ NS_INLINE size_t QAAlignSize(size_t size)
if (strongSelf.cameraDelegate && !strongSelf.isPaused) {
if (@available(iOS 11.0, *)) {
glFlush();
[EAGLContext setCurrentContext:strongSelf.openGLContext];
[strongSelf.cameraDelegate draw:strongSelf.frameTime];
[strongSelf.cameraDelegate externalRender:strongSelf.frameTime
targetTexture:strongSelf.shareTexture
commandBuffer:commandBuffer];
[EAGLContext setCurrentContext:self.openGLContext];
IOSurfaceID surfaceId = [strongSelf.cameraDelegate bgraCameraTextureReady:strongSelf.cameraTexture
[strongSelf.cameraDelegate bgraCameraTextureReady:strongSelf.cameraTexture
onScreenTexture:strongSelf.shareTexture
frameTime:strongSelf.frameTime * 1000];
IOSurfaceID surfaceId = [strongSelf.cameraDelegate externalRender:strongSelf.frameTime
targetTexture:strongSelf.cameraTexture
commandBuffer:commandBuffer];
if (surfaceId != -1) {
//这里渲染surfaceId
IOSurfaceRef ioSurface = IOSurfaceLookup(surfaceId);
IOSurfaceLock(ioSurface, kIOSurfaceLockReadOnly, nil);
if (ioSurface) {
if (self.lastIOSurfaceID != surfaceId || self.ioSurfaceTexture == nil) {
if (strongSelf.lastIOSurfaceID != surfaceId || strongSelf.ioSurfaceTexture == nil) {
id<MTLTexture> texture;
MTLTextureDescriptor *textureDescriptor = [MTLTextureDescriptor
texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
@ -254,18 +256,16 @@ NS_INLINE size_t QAAlignSize(size_t size)
mipmapped:NO];
textureDescriptor.storageMode = MTLStorageModeShared;
textureDescriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
texture = [self.device newTextureWithDescriptor:textureDescriptor iosurface:ioSurface plane:0];
self.ioSurfaceTexture = texture;
texture = [strongSelf.device newTextureWithDescriptor:textureDescriptor iosurface:ioSurface plane:0];
strongSelf.ioSurfaceTexture = texture;
textureDescriptor = nil;
}
IOSurfaceUnlock(ioSurface, kIOSurfaceLockReadOnly, nil);
CFRelease(ioSurface);
self.lastIOSurfaceID = surfaceId;
if (self.ioSurfaceTexture) {
strongSelf.lastIOSurfaceID = surfaceId;
if (strongSelf.ioSurfaceTexture) {
IOSurfaceLock(ioSurface, kIOSurfaceLockReadOnly, nil);
[strongSelf.mtlRender renderToTexture:drawable.texture
from:self.ioSurfaceTexture
from:strongSelf.ioSurfaceTexture
commandBuffer:commandBuffer
textureCoordinate:strongSelf.mtlRender.noRotationBuffer];
if (drawable) {
@ -273,6 +273,8 @@ NS_INLINE size_t QAAlignSize(size_t size)
[commandBuffer addCompletedHandler:renderCompleted];
[commandBuffer commit];
}
IOSurfaceUnlock(ioSurface, kIOSurfaceLockReadOnly, nil);
CFRelease(ioSurface);
return;
}
@ -299,6 +301,37 @@ NS_INLINE size_t QAAlignSize(size_t size)
}
- (void)renderPixelbuffer:(CVPixelBufferRef)pixelbuffer
{
if (self.isPaused) {
return;
}
if (dispatch_semaphore_wait(self.cameraFrameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
{
return;
}
dispatch_semaphore_t block_camera_sema = self.cameraFrameRenderingSemaphore;
__strong OlaMTLCameraRenderView *weakSelf = self;
void (^renderCompleted)(id<MTLCommandBuffer> buffer) = ^(id<MTLCommandBuffer> buffer)
{
dispatch_semaphore_signal(block_camera_sema);
};
CFRetain(pixelbuffer);
dispatch_async(self.displayRenderQueue, ^{
if (weakSelf == nil) {
CFRelease(pixelbuffer);
return;
}
__strong OlaMTLCameraRenderView *strongSelf = weakSelf;
[strongSelf.mtlRender renderToCameraTextureWithPixelBuffer:pixelbuffer completedHandler:renderCompleted];
CFRelease(pixelbuffer);
});
}
- (void)cameraSampleBufferArrive:(CMSampleBufferRef)sampleBuffer
{
if (self.isPaused) {

View File

@ -63,7 +63,7 @@ cc_library(
copts = select({
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
# "-fobjc-arc", # enable reference-counting
],
"//conditions:default": ["-std=c++17"],
}),

View File

@ -29,8 +29,8 @@ namespace Opipe
FaceMeshBeautyRender::~FaceMeshBeautyRender()
{
_olaBeautyFilter->removeAllTargets();
if (_olaBeautyFilter)
{
_olaBeautyFilter->release();
@ -122,6 +122,13 @@ namespace Opipe
return outputTexture;
}
// Forwards the face-mesh landmark points to the beauty filter's "face"
// property (consumed by the face-distortion stage). No-op while the
// filter has not been created yet.
void FaceMeshBeautyRender::setFacePoints(std::vector<Vec2> facePoints) {
if (_olaBeautyFilter) {
_olaBeautyFilter->setProperty("face", facePoints);
}
}
float FaceMeshBeautyRender::getSmoothing()
{
return _smoothing;
@ -137,7 +144,7 @@ namespace Opipe
_smoothing = smoothing;
if (_olaBeautyFilter)
{
_olaBeautyFilter->setSmoothing(smoothing);
_olaBeautyFilter->setProperty("skin", smoothing);
}
}
@ -146,8 +153,23 @@ namespace Opipe
_whitening = whitening;
if (_olaBeautyFilter)
{
_olaBeautyFilter->setWhitening(whitening);
_olaBeautyFilter->setProperty("whiten", whitening);
}
}
// Stores the nose-slimming strength and pushes it to the beauty filter's
// "nose" property. Presumably in the 0.0 - 1.0 range like the other
// beauty factors — TODO confirm against OlaBeautyFilter's registration.
void FaceMeshBeautyRender::setNoseFactor(float noseFactor) {
_noseFactor = noseFactor;
if (_olaBeautyFilter) {
_olaBeautyFilter->setProperty("nose", noseFactor);
}
}
// Stores the face-slimming strength and pushes it to the beauty filter's
// "slim" property. Presumably in the 0.0 - 1.0 range like the other
// beauty factors — TODO confirm against OlaBeautyFilter's registration.
void FaceMeshBeautyRender::setFaceSlim(float slimFactor) {
_faceFactor = slimFactor;
if (_olaBeautyFilter) {
_olaBeautyFilter->setProperty("slim", slimFactor);
}
}
}

View File

@ -4,7 +4,7 @@
#include "mediapipe/render/module/beauty/filters/OlaBeautyFilter.hpp"
#include "mediapipe/render/core/OlaShareTextureFilter.hpp"
#include "mediapipe/render/core/SourceImage.hpp"
#include "mediapipe/render/core/math/vec2.hpp"
namespace Opipe {
class FaceMeshBeautyRender {
public:
@ -25,6 +25,18 @@ namespace Opipe {
/// 美白
float getWhitening();
float getEye() {
return _eyeFactor;
}
float getFace() {
return _faceFactor;
}
float getNose() {
return _noseFactor;
}
/// 磨皮
/// @param smoothing 磨皮 0.0 - 1.0
@ -34,12 +46,29 @@ namespace Opipe {
/// 美白
/// @param whitening 美白 0.0 - 1.0
void setWhitening(float whitening);
/// 设置人脸点 mediapipe版
void setFacePoints(std::vector<Vec2> facePoints);
// 大眼
void setEye(float eyeFactor);
// 瘦脸
void setFaceSlim(float slimFactor);
// 瘦鼻
void setNoseFactor(float noseFactor);
private:
OlaBeautyFilter *_olaBeautyFilter = nullptr;
OlaShareTextureFilter *_outputFilter = nullptr;
Framebuffer *_inputFramebuffer = nullptr;
float _smoothing = 0.0;
float _whitening = 0.0;
float _noseFactor = 0.0;
float _faceFactor = 0.0;
float _eyeFactor = 0.0;
bool _isRendering = false;
Context *_context = nullptr;
SourceImage *_lutImage = nullptr;

View File

@ -1,5 +1,10 @@
#include "face_mesh_module_imp.h"
#include "mediapipe/render/core/Context.hpp"
#include "mediapipe/render/core/math/vec2.hpp"
#if TestTemplateFace
#include "mediapipe/render/core/CVFramebuffer.hpp"
#import <UIKit/UIKit.h>
#endif
static const char* kNumFacesInputSidePacket = "num_faces";
static const char* kLandmarksOutputStream = "multi_face_landmarks";
@ -18,45 +23,43 @@ namespace Opipe
}
#if defined(__APPLE__)
void FaceMeshCallFrameDelegate::outputPixelbuffer(OlaGraph *graph, CVPixelBufferRef pixelbuffer,
const std::string &streamName, int64_t timstamp)
const std::string &streamName, int64_t timestamp)
{
_imp->currentDispatch()->runSync([&] {
IOSurfaceRef surface = CVPixelBufferGetIOSurface(pixelbuffer);
IOSurfaceID surfaceId = IOSurfaceGetID(surface);
Log("Opipe", "streamName %s timeStamp:%ld iosurfaceid:%d", streamName.c_str(), timestamp, surfaceId);
});
}
#endif
void FaceMeshCallFrameDelegate::outputPacket(OlaGraph *graph, const mediapipe::Packet &packet, const std::string &streamName) {
#if defined(__APPLE__)
NSLog(@"streamName:%@ ts:%lld 是否有人脸:%@", [NSString stringWithUTF8String:streamName.c_str()],
packet.Timestamp().Value(), @(_hasFace));
#endif
if (_imp == nullptr) {
return;
}
if (streamName == kLandmarksOutputStream) {
_last_landmark_ts = packet.Timestamp().Value();
if (_last_video_ts == _last_landmark_ts) {
//有人脸
_imp->currentDispatch()->runSync([&] {
if (streamName == kLandmarksOutputStream) {
_last_landmark_ts = packet.Timestamp().Value();
_hasFace = true;
const auto& multi_face_landmarks = packet.Get<std::vector<::mediapipe::NormalizedLandmarkList>>();
_lastLandmark = multi_face_landmarks[0];
}
}
Log("FaceMeshModule", "landmarkts:%ld", _last_landmark_ts);
if (_last_video_ts != _last_landmark_ts) {
_hasFace = false;
}
if (packet.Timestamp().Value() != _last_landmark_ts) {
_hasFace = false;
_last_landmark_ts = 0; //输出过一次的时间戳 不再输出
}
_last_video_ts = packet.Timestamp().Value();
if (_hasFace) {
if (_hasFace) {
_imp->setLandmark(_lastLandmark);
} else {
_imp->setLandmark(_emptyLandmark);
}
_imp->setLandmark(_lastLandmark, packet.Timestamp().Value());
} else {
_imp->setLandmark(_emptyLandmark, packet.Timestamp().Value());
}
}, Opipe::Context::IOContext);
}
void FaceMeshCallFrameDelegate::outputPacket(OlaGraph *graph, const mediapipe::Packet &packet,
@ -163,27 +166,34 @@ namespace Opipe
_dispatch->runSync([&] {
if (_render == nullptr) {
_render = new FaceMeshBeautyRender(_context);
#if TestTemplateFace
UIImage *image = [UIImage imageNamed:@"templateFace"];
_templateFace = SourceImage::create(_context, image);
#endif
}
});
}
return true;
}
void FaceMeshModuleIMP::setLandmark(NormalizedLandmarkList landmark)
void FaceMeshModuleIMP::setLandmark(NormalizedLandmarkList landmark, int64_t timeStamp)
{
_lastLandmark = std::move(landmark);
// if (_lastLandmark.landmark_size() == 0) {
//#if defined(__APPLE__)
// NSLog(@"没有人脸");
//#endif
// }
// for (int i = 0; i < _lastLandmark.landmark_size(); ++i) {
//#if defined(__APPLE__)
// NSLog(@"######## Set Landmark[%d]: (%f, %f, %f)", i, _lastLandmark.landmark(i).x(),
// _lastLandmark.landmark(i).y(), _lastLandmark.landmark(i).z());
//#endif
// }
if (_lastLandmark.landmark_size() == 0) {
Log("FaceMeshModule", "没有检测到人脸");
} else {
// _graph->cosumeFrame();
// _graph->closeAllInputStreams();
Log("FaceMeshModule", "检测到人脸输出");
}
}
void FaceMeshModuleIMP::startModule()
@ -193,6 +203,7 @@ namespace Opipe
return;
}
_isInit = _graph->start();
_graph->setUseVideoOutput(false);
}
void FaceMeshModuleIMP::stopModule()
@ -216,11 +227,24 @@ namespace Opipe
return;
}
Timestamp ts(timeStamp * 1000);
#if TestTemplateFace
auto *framebuffer = dynamic_cast<CVFramebuffer *>(_templateFace->getFramebuffer());
CVPixelBufferRef renderTarget = framebuffer->renderTarget;
framebuffer->lockAddress();
_graph->sendPixelBuffer(renderTarget, "input_video",
MPPPacketTypePixelBuffer,
ts);
framebuffer->unlockAddress();
#else
CVPixelBufferLockBaseAddress(pixelbuffer, 0);
_graph->sendPixelBuffer(pixelbuffer, "input_video",
MPPPacketTypePixelBuffer,
ts);
CVPixelBufferUnlockBaseAddress(pixelbuffer, 0);
#endif
}
#endif
@ -254,18 +278,21 @@ namespace Opipe
_dispatch->runSync([&] {
// GLsync sync;
// _dispatch->runAsync([&] {
// _render->renderTexture(inputTexture);
// sync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
// glFlush();
// });
// glWaitSync(sync, 0, GL_TIMEOUT_IGNORED);
// glDeleteSync(sync);
_render->renderTexture(inputTexture);
});
textureInfo = _render->outputRenderTexture(inputTexture);
std::vector<Vec2> facePoints;
if (_lastLandmark.landmark_size() > 0) {
Log("FaceMeshModule", "检测到人脸输出");
for (int i = 0; i < _lastLandmark.landmark_size(); i++) {
facePoints.emplace_back( _lastLandmark.landmark(i).x(), _lastLandmark.landmark(i).y());
}
Log("FaceMeshModule", "检测到人脸输完毕");
} else {
_render->setFacePoints(facePoints);
}
return textureInfo;
}

View File

@ -8,6 +8,12 @@
#include "face_mesh_module.h"
#include "face_mesh_beauty_render.h"
#define TestTemplateFace 0
#if TestTemplateFace
#include "mediapipe/render/core/SourceImage.hpp"
#endif
namespace Opipe
{
class FaceMeshModuleIMP;
@ -32,7 +38,6 @@ namespace Opipe
private:
int64_t _last_landmark_ts = 0;
int64_t _last_video_ts = 0;
bool _hasFace = false;
NormalizedLandmarkList _lastLandmark;
NormalizedLandmarkList _emptyLandmark;
@ -79,7 +84,7 @@ namespace Opipe
virtual TextureInfo renderTexture(TextureInfo inputTexture) override;
virtual void setLandmark(NormalizedLandmarkList landmark);
virtual void setLandmark(NormalizedLandmarkList landmark, int64_t timestamp);
/// 磨皮
float getSmoothing() override {
@ -105,6 +110,10 @@ namespace Opipe
_render->setWhitening(whitening);
}
OpipeDispatch* currentDispatch() {
return _dispatch.get();
}
private:
std::unique_ptr<OpipeDispatch> _dispatch;
std::unique_ptr<OlaGraph> _graph;
@ -114,6 +123,10 @@ namespace Opipe
std::shared_ptr<FaceMeshCallFrameDelegate> _delegate;
FaceMeshBeautyRender *_render = nullptr;
OlaContext *_olaContext = nullptr;
#if TestTemplateFace
SourceImage *_templateFace = nullptr;
#endif
};
}
#endif

View File

@ -34,7 +34,7 @@ cc_library(
copts = select({
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
# "-fobjc-arc", # enable reference-counting
],
"//conditions:default": ["-std=c++17"],
}),

View File

@ -18,6 +18,7 @@ namespace Opipe
uniform int count;
uniform float eye;
uniform float slim;
uniform float nose;
uniform int debug;
void main() {
vec2 uv = texCoord.xy;
@ -58,6 +59,16 @@ namespace Opipe
uv = vec2(textureCoordinateToUse.x - (delta * directionX),
textureCoordinateToUse.y - (delta * directionY));
}
else if (types[i] == 3)
{
float dist = 1.0 - d;
float delta = scale[i] * dist * nose;
float deltaScale = smoothstep(u_min[i], u_max[i], dist);
float directionX = cos(angle[i]) * deltaScale;
float directionY = sin(angle[i]) * deltaScale / (3.0 / 4.0 * aspectRatio);
uv = vec2(textureCoordinateToUse.x - (delta * directionX),
textureCoordinateToUse.y - (delta * directionY));
}
}
}
vTexCoord = uv;
@ -228,12 +239,12 @@ namespace Opipe
_filterProgram->setUniformValue("eye", _eye);
_filterProgram->setUniformValue("slim", _slim);
_filterProgram->setUniformValue("nose", _nose);
//左眼放大
{
Vector2 point1 = Vector2(_facePoints[75].x, _facePoints[75].y);
Vector2 point2 = Vector2(_facePoints[79].x, _facePoints[79].y);
Vector2 point3 = Vector2(_facePoints[65].x, _facePoints[65].y);
Vector2 point1 = Vector2(_facePoints[362].x, _facePoints[362].y);
Vector2 point2 = Vector2(_facePoints[263].x, _facePoints[263].y);
Vector2 point3 = Vector2(_facePoints[417].x, _facePoints[417].y);
Vector2 center = point1.getCenter(point2);
float distance = center.distance(point3);
addPoint(center, distance / 2, distance / 2, 0.3, 1, 0.0f, 0.0f, 1);
@ -241,9 +252,9 @@ namespace Opipe
//右眼放大
{
Vector2 point1 = Vector2(_facePoints[66].x, _facePoints[66].y);
Vector2 point2 = Vector2(_facePoints[70].x, _facePoints[70].y);
Vector2 point3 = Vector2(_facePoints[55].x, _facePoints[55].y);
Vector2 point1 = Vector2(_facePoints[33].x, _facePoints[33].y);
Vector2 point2 = Vector2(_facePoints[133].x, _facePoints[133].y);
Vector2 point3 = Vector2(_facePoints[193].x, _facePoints[193].y);
Vector2 center = point1.getCenter(point2);
float distance = center.distance(point3);
addPoint(center, distance / 2, distance / 2, 0.3, 1, 0.0f, 0.0f, 1);
@ -251,10 +262,10 @@ namespace Opipe
//瘦左脸
{
Vector2 point1 = Vector2(_facePoints[11].x, _facePoints[11].y);
Vector2 point2 = Vector2(_facePoints[60].x, _facePoints[60].y);
Vector2 point3 = Vector2(_facePoints[4].x, _facePoints[4].y);
Vector2 point4 = Vector2(_facePoints[16].x, _facePoints[16].y);
Vector2 point1 = Vector2(_facePoints[136].x, _facePoints[136].y);
Vector2 point2 = Vector2(_facePoints[19].x, _facePoints[19].y);
Vector2 point3 = Vector2(_facePoints[234].x, _facePoints[234].y);
Vector2 point4 = Vector2(_facePoints[152].x, _facePoints[152].y);
float angle = getRadius(point2, point1);
addPoint(point1, point1.distance(point3), point1.distance(point4), 0.02, 2, angle,
@ -263,10 +274,10 @@ namespace Opipe
}
//瘦右脸
{
Vector2 point1 = Vector2(_facePoints[21].x, _facePoints[21].y);
Vector2 point2 = Vector2(_facePoints[60].x, _facePoints[60].y);
Vector2 point3 = Vector2(_facePoints[28].x, _facePoints[28].y);
Vector2 point4 = Vector2(_facePoints[16].x, _facePoints[16].y);
Vector2 point1 = Vector2(_facePoints[379].x, _facePoints[379].y);
Vector2 point2 = Vector2(_facePoints[19].x, _facePoints[19].y);
Vector2 point3 = Vector2(_facePoints[454].x, _facePoints[454].y);
Vector2 point4 = Vector2(_facePoints[152].x, _facePoints[152].y);
float angle = getRadius(point2, point1);
addPoint(point1, point1.distance(point3), point1.distance(point4), 0.02, 2, angle,

View File

@ -19,15 +19,32 @@ namespace Opipe
virtual bool proceed(float frameTime = 0.0, bool bUpdateTargets = true) override;
public:
float eye() {
return _eye;
}
float slim() {
return _slim;
}
float nose() {
return _nose;
}
void setEye(float eye)
{
_eye = eye;
};
}
void setSlim(float slim)
{
_slim = slim;
};
}
void setNose(float nose)
{
_nose = nose;
}
void setFacePoints(std::vector<Vec2> facePoints)
{
@ -62,6 +79,7 @@ namespace Opipe
private:
float _eye = 0.0;
float _slim = 0.0;
float _nose = 0.0;
std::vector<Vec2> _facePoints; //暂时支持单个人脸
GLuint vao = -1;
GLuint eao = -1;

View File

@ -1,4 +1,5 @@
#include "OlaBeautyFilter.hpp"
#include "mediapipe/render/core/math/vec2.hpp"
namespace Opipe {
OlaBeautyFilter::OlaBeautyFilter(Context *context) : FilterGroup(context)
@ -67,7 +68,7 @@ namespace Opipe {
_lutFilter = LUTFilter::create(context);
_unSharpMaskFilter = UnSharpMaskFilter::create(context);
_unSharpMaskFilter->addTarget(_lutFilter, 0);
_faceDistortFilter = FaceDistortionFilter::create(context);
_bilateralAdjustFilter = BilateralAdjustFilter::create(context);
addFilter(_bilateralAdjustFilter);
@ -75,11 +76,6 @@ namespace Opipe {
_lookUpGroupFilter->addFilter(_unSharpMaskFilter);
_alphaBlendFilter = AlphaBlendFilter::create(context);
// addFilter(_lookUpGroupFilter);
// addFilter(_lutFilter);
// setTerminalFilter(_lutFilter);
_bilateralFilter = BilateralFilter::create(context);
addFilter(_bilateralFilter);
@ -88,9 +84,8 @@ namespace Opipe {
_bilateralFilter->addTarget(_bilateralAdjustFilter, 1)->addTarget(_alphaBlendFilter, 0);
_alphaBlendFilter->setMix(0.0);
_alphaBlendFilter->setMix(0.8);
setTerminalFilter(_alphaBlendFilter);
_bilateralAdjustFilter->setOpacityLimit(0.6);
_bilateralFilter->setDistanceNormalizationFactor(2.746);
@ -99,45 +94,46 @@ namespace Opipe {
_unSharpMaskFilter->setBlurRadiusInPixel(2.0f, false);
_unSharpMaskFilter->setIntensity(1.365);
// _bilateralFilter = BilateralFilter::create(context);
// addFilter(_bilateralFilter);
//
// _bilateralAdjustFilter = BilateralAdjustFilter::create(context);
// addFilter(_bilateralAdjustFilter);
//
// _unSharpMaskFilter = UnSharpMaskFilter::create(context);
//
// _lutFilter = LUTFilter::create(context);
// _unSharpMaskFilter->addTarget(_lutFilter, 0);
//
// _lookUpGroupFilter = FilterGroup::create(context);
// _lookUpGroupFilter->addFilter(_unSharpMaskFilter);
//
// _alphaBlendFilter = AlphaBlendFilter::create(context);
// _faceDistortFilter = FaceDistortionFilter::create(context);
//
//
// _bilateralFilter->addTarget(_bilateralAdjustFilter, 1)->
// addTarget(_alphaBlendFilter, 0);
//
// _bilateralAdjustFilter->addTarget(_lookUpGroupFilter)->
// addTarget(_alphaBlendFilter, 1)->addTarget(_faceDistortFilter);
//
// _alphaBlendFilter->setMix(0.8);
//
// _unSharpMaskFilter->setBlurRadiusInPixel(4.0f, true);
// _unSharpMaskFilter->setBlurRadiusInPixel(2.0f, false);
// _unSharpMaskFilter->setIntensity(1.365);
//
// _bilateralAdjustFilter->setOpacityLimit(0.6);
//
// _bilateralFilter->setDistanceNormalizationFactor(2.746);
// _bilateralFilter->setTexelSpacingMultiplier(2.7);
//
// setTerminalFilter(_faceDistortFilter);
_alphaBlendFilter->addTarget(_faceDistortFilter);
setTerminalFilter(_faceDistortFilter);
std::vector<Vec2> defaultFace;
registerProperty("face", defaultFace, "人脸点", [this](std::vector<Vec2> facePoints) {
_faceDistortFilter->setFacePoints(facePoints);
});
registerProperty("eye", 0.0f, "大眼 0.0 - 1.0",
[this](float eye) {
_faceDistortFilter->setEye(eye);
});
registerProperty("slim", 0.0f, "瘦脸 0.0 - 1.0",
[this](float slim) {
_faceDistortFilter->setSlim(slim);
});
registerProperty("nose", 0.0f, "瘦鼻 0.0 - 1.0",
[this](float nose) {
_faceDistortFilter->setNose(nose);
});
registerProperty("skin", 0.0f, "磨皮 0.0 - 1.0",
[this](float skin) {
if (skin == 0.0) {
_bilateralAdjustFilter->setEnable(false);
} else {
_bilateralAdjustFilter->setEnable(true);
_bilateralAdjustFilter->setOpacityLimit(skin);
}
});
registerProperty("whiten", 0.0f, "美白 0.0 - 1.0",
[this](float whiten) {
_alphaBlendFilter->setMix(whiten);
});
return true;
}
@ -169,39 +165,4 @@ namespace Opipe {
}
}
void OlaBeautyFilter::setSmoothing(float smoothing) {
if (_bilateralAdjustFilter == nullptr) {
return;
}
if (smoothing == 0.0) {
_bilateralAdjustFilter->setEnable(false);
} else {
_bilateralAdjustFilter->setEnable(true);
_bilateralAdjustFilter->setOpacityLimit(smoothing);
}
}
float OlaBeautyFilter::getSmoothing() {
if (_bilateralAdjustFilter) {
return _bilateralAdjustFilter->getOpacityLimit();
}
return 0.0;
}
void OlaBeautyFilter::setWhitening(float whitening) {
if (_alphaBlendFilter) {
_alphaBlendFilter->setMix(whitening);
}
_lutFilter->setStep(whitening);
}
float OlaBeautyFilter::getWhitening() {
    // Mirror of setWhitening: the alpha-blend mix is treated as the
    // canonical whitening value; 0.0 when the filter is absent.
    if (_alphaBlendFilter == nullptr) {
        return 0.0f;
    }
    return _alphaBlendFilter->getMix();
}
}

View File

@ -12,14 +12,6 @@ namespace Opipe
{
class OlaBeautyFilter : public FilterGroup
{
public:
float getSmoothing();
float getWhitening();
void setSmoothing(float smoothing);
void setWhitening(float whitening);
public:
static OlaBeautyFilter *create(Context *context);
@ -42,35 +34,6 @@ namespace Opipe
virtual ~OlaBeautyFilter();
void setFacePoints(std::vector<Vec2> facePoints) {
_faceDistortFilter->setFacePoints(facePoints);
}
// "大眼 0.0 - 1.0"
void setEye(float eye) {
_faceDistortFilter->setEye(eye);
}
//1.0f, "瘦脸 0.0 - 1.0",
void setSlim(float slim) {
_faceDistortFilter->setSlim(slim);
}
// "磨皮 0.0 - 1.0"
void setSkin(float skin) {
if (skin == 0.0) {
_bilateralAdjustFilter->setEnable(false);
} else {
_bilateralAdjustFilter->setEnable(true);
_bilateralAdjustFilter->setOpacityLimit(skin);
}
}
// "美白 0.0 - 1.0"
void setWhiten(float whiten) {
_alphaBlendFilter->setMix(whiten);
}
private:
BilateralFilter *_bilateralFilter = 0;
AlphaBlendFilter *_alphaBlendFilter = 0;
@ -79,7 +42,6 @@ namespace Opipe
UnSharpMaskFilter *_unSharpMaskFilter = 0;
FaceDistortionFilter *_faceDistortFilter = 0;
FilterGroup *_lookUpGroupFilter = 0;
SourceImage *_lutImage = 0;
};
}

View File

@ -7,7 +7,7 @@
<key>OpipeBeautyModuleExample.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>7</integer>
<integer>3</integer>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>

View File

@ -0,0 +1,21 @@
{
"images" : [
{
"filename" : "templateFace.jpg",
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"scale" : "2x"
},
{
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -64,6 +64,7 @@ AVCaptureAudioDataOutputSampleBufferDelegate> {
}
[self setupSession];
[[OlaFaceUnity sharedInstance] resume];
}
- (void)viewDidLayoutSubviews
@ -254,29 +255,26 @@ AVCaptureAudioDataOutputSampleBufferDelegate> {
}
}
- (IOSurfaceID)bgraCameraTextureReady:(OlaShareTexture *)texture
- (void)bgraCameraTextureReady:(OlaShareTexture *)texture
onScreenTexture:(OlaShareTexture *)onScreenTexture
frameTime:(NSTimeInterval)frameTime
{
[[OlaFaceUnity sharedInstance] processVideoFrame:onScreenTexture.renderTarget timeStamp:frameTime];
FaceTextureInfo inputTexture;
inputTexture.width = onScreenTexture.size.width;
inputTexture.height = onScreenTexture.size.height;
inputTexture.textureId = onScreenTexture.openGLTexture;
inputTexture.ioSurfaceId = onScreenTexture.surfaceID;
inputTexture.frameTime = frameTime;
FaceTextureInfo result = [[OlaFaceUnity sharedInstance] render:inputTexture];
NSLog(@"result ioSurfaceId:%d", result.ioSurfaceId);
return result.ioSurfaceId;
[[OlaFaceUnity sharedInstance] processVideoFrame:texture.renderTarget timeStamp:frameTime];
}
- (void)externalRender:(NSTimeInterval)frameTime
- (IOSurfaceID)externalRender:(NSTimeInterval)frameTime
targetTexture:(OlaShareTexture *)targetTexture
commandBuffer:(id<MTLCommandBuffer>)buffer
{
FaceTextureInfo inputTexture;
inputTexture.width = targetTexture.size.width;
inputTexture.height = targetTexture.size.height;
inputTexture.textureId = targetTexture.openGLTexture;
inputTexture.ioSurfaceId = targetTexture.surfaceID;
inputTexture.frameTime = frameTime;
FaceTextureInfo result = [[OlaFaceUnity sharedInstance] render:inputTexture];
return result.ioSurfaceId;
}
- (void)yuvTextureReady:(OlaShareTexture *)yTexture uvTexture:(OlaShareTexture *)uvTexture

Binary file not shown.

After

Width:  |  Height:  |  Size: 109 KiB

View File

@ -28,7 +28,8 @@ objc_library(
"@ios_opencv//:OpencvFramework",
],
data = [
"//mediapipe/graphs/face_mesh:face_mesh_mobile_gpu.binarypb",
# "//mediapipe/graphs/face_mesh:face_mesh_mobile_gpu.binarypb",
"//mediapipe/graphs/face_mesh:face_mesh_mobile_landmark_gpu.binarypb",
"//mediapipe/modules/face_detection:face_detection_short_range.tflite",
"//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite",
"//mediapipe/render/module/beauty:whiten.png",
@ -36,7 +37,7 @@ objc_library(
copts = select({
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
# "-fobjc-arc", # enable reference-counting
],
"//conditions:default": [],
}),

View File

@ -1,29 +1,29 @@
<Scheme version="1.3" LastUpgradeVersion="1000">
<BuildAction buildImplicitDependencies="YES" parallelizeBuildables="YES">
<Scheme LastUpgradeVersion="1000" version="1.3">
<BuildAction parallelizeBuildables="YES" buildImplicitDependencies="YES">
<BuildActionEntries>
<BuildActionEntry buildForTesting="YES" buildForRunning="YES" buildForProfiling="YES" buildForAnalyzing="YES" buildForArchiving="YES">
<BuildableReference BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BuildableIdentifier="primary" BlueprintName="OlaFaceUnityFramework" BuildableName="OlaFaceUnityFramework.framework" ReferencedContainer="container:FaceUnityFramework.xcodeproj"></BuildableReference>
<BuildActionEntry buildForAnalyzing="YES" buildForTesting="YES" buildForRunning="YES" buildForArchiving="YES" buildForProfiling="YES">
<BuildableReference ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="OlaFaceUnityFramework" BuildableIdentifier="primary" BuildableName="OlaFaceUnityFramework.framework" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000"></BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction buildConfiguration="__TulsiTestRunner_Debug" shouldUseLaunchSchemeArgsEnv="YES" customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB">
<TestAction customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" buildConfiguration="__TulsiTestRunner_Debug" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" shouldUseLaunchSchemeArgsEnv="YES" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB">
<Testables></Testables>
<BuildableProductRunnable runnableDebuggingMode="0">
<BuildableReference BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BuildableName="OlaFaceUnityFramework.framework" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="OlaFaceUnityFramework"></BuildableReference>
<BuildableReference ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableName="OlaFaceUnityFramework.framework" BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BlueprintName="OlaFaceUnityFramework"></BuildableReference>
</BuildableProductRunnable>
</TestAction>
<LaunchAction customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" debugServiceExtension="internal" allowLocationSimulation="YES" useCustomWorkingDirectory="NO" debugDocumentVersioning="YES" buildConfiguration="Debug" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" launchStyle="0" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" ignoresPersistentStateOnLaunch="NO">
<LaunchAction customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" ignoresPersistentStateOnLaunch="NO" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" allowLocationSimulation="YES" useCustomWorkingDirectory="NO" debugDocumentVersioning="YES" debugServiceExtension="internal" launchStyle="0" buildConfiguration="Debug">
<EnvironmentVariables></EnvironmentVariables>
<BuildableProductRunnable runnableDebuggingMode="0">
<BuildableReference BuildableName="OlaFaceUnityFramework.framework" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary" BlueprintName="OlaFaceUnityFramework" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000"></BuildableReference>
<BuildableReference ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="OlaFaceUnityFramework" BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BuildableName="OlaFaceUnityFramework.framework"></BuildableReference>
</BuildableProductRunnable>
</LaunchAction>
<ProfileAction debugDocumentVersioning="YES" buildConfiguration="__TulsiTestRunner_Release" shouldUseLaunchSchemeArgsEnv="YES" useCustomWorkingDirectory="NO">
<ProfileAction buildConfiguration="__TulsiTestRunner_Release" debugDocumentVersioning="YES" useCustomWorkingDirectory="NO" shouldUseLaunchSchemeArgsEnv="YES">
<BuildableProductRunnable runnableDebuggingMode="0">
<BuildableReference BlueprintIdentifier="F2FE34CE0C5C7AFE00000000" BuildableName="OlaFaceUnityFramework.framework" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="OlaFaceUnityFramework" BuildableIdentifier="primary"></BuildableReference>
<BuildableReference BuildableName="OlaFaceUnityFramework.framework" BlueprintName="OlaFaceUnityFramework" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CE0C5C7AFE00000000"></BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction buildConfiguration="Debug"></AnalyzeAction>
<ArchiveAction buildConfiguration="Release" revealArchiveInOrganizer="YES"></ArchiveAction>
<ArchiveAction revealArchiveInOrganizer="YES" buildConfiguration="Release"></ArchiveAction>
</Scheme>

View File

@ -2,28 +2,28 @@
<Scheme version="1.3" LastUpgradeVersion="1000">
<BuildAction parallelizeBuildables="YES" buildImplicitDependencies="YES">
<BuildActionEntries>
<BuildActionEntry buildForAnalyzing="YES" buildForTesting="YES" buildForProfiling="YES" buildForRunning="YES" buildForArchiving="YES">
<BuildableReference BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" BlueprintIdentifier="F2FE34CED4660C9200000000" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a"></BuildableReference>
<BuildActionEntry buildForProfiling="YES" buildForArchiving="YES" buildForTesting="YES" buildForRunning="YES" buildForAnalyzing="YES">
<BuildableReference BuildableIdentifier="primary" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a"></BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" buildConfiguration="__TulsiTestRunner_Debug" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" shouldUseLaunchSchemeArgsEnv="YES">
<TestAction buildConfiguration="__TulsiTestRunner_Debug" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" shouldUseLaunchSchemeArgsEnv="YES" customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB">
<Testables></Testables>
<BuildableProductRunnable runnableDebuggingMode="0">
<BuildableReference BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BuildableIdentifier="primary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary"></BuildableReference>
<BuildableReference BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary"></BuildableReference>
</BuildableProductRunnable>
</TestAction>
<LaunchAction launchStyle="0" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" ignoresPersistentStateOnLaunch="NO" debugDocumentVersioning="YES" useCustomWorkingDirectory="NO" debugServiceExtension="internal" customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" allowLocationSimulation="YES" buildConfiguration="Debug" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB">
<LaunchAction customLLDBInitFile="$(PROJECT_FILE_PATH)/.tulsi/Utils/lldbinit" selectedDebuggerIdentifier="Xcode.DebuggerFoundation.Debugger.LLDB" launchStyle="0" debugServiceExtension="internal" selectedLauncherIdentifier="Xcode.DebuggerFoundation.Launcher.LLDB" allowLocationSimulation="YES" debugDocumentVersioning="YES" buildConfiguration="Debug" useCustomWorkingDirectory="NO" ignoresPersistentStateOnLaunch="NO">
<EnvironmentVariables></EnvironmentVariables>
<MacroExpansion>
<BuildableReference BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BuildableIdentifier="primary" BlueprintIdentifier="F2FE34CED4660C9200000000"></BuildableReference>
<BuildableReference BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BuildableIdentifier="primary" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintIdentifier="F2FE34CED4660C9200000000"></BuildableReference>
</MacroExpansion>
</LaunchAction>
<ProfileAction shouldUseLaunchSchemeArgsEnv="YES" useCustomWorkingDirectory="NO" debugDocumentVersioning="YES" buildConfiguration="__TulsiTestRunner_Release">
<ProfileAction buildConfiguration="__TulsiTestRunner_Release" shouldUseLaunchSchemeArgsEnv="YES" useCustomWorkingDirectory="NO" debugDocumentVersioning="YES">
<MacroExpansion>
<BuildableReference ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BuildableIdentifier="primary"></BuildableReference>
<BuildableReference BuildableName="libmediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary.a" BlueprintName="mediapipe-render-module-beauty-ios-framework-OlaFaceUnityLibrary" ReferencedContainer="container:FaceUnityFramework.xcodeproj" BlueprintIdentifier="F2FE34CED4660C9200000000" BuildableIdentifier="primary"></BuildableReference>
</MacroExpansion>
</ProfileAction>
<AnalyzeAction buildConfiguration="Debug"></AnalyzeAction>
<ArchiveAction buildConfiguration="Release" revealArchiveInOrganizer="YES"></ArchiveAction>
<ArchiveAction revealArchiveInOrganizer="YES" buildConfiguration="Release"></ArchiveAction>
</Scheme>

View File

@ -30,7 +30,7 @@
{
_face_module = Opipe::FaceMeshModule::create();
NSBundle *bundle = [NSBundle bundleForClass:[self class]];
NSURL* graphURL = [bundle URLForResource:@"face_mesh_mobile_gpu" withExtension:@"binarypb"];
NSURL* graphURL = [bundle URLForResource:@"face_mesh_mobile_landmark_gpu" withExtension:@"binarypb"];
NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:nil];
if (data) {
_face_module->init(nullptr, (void *)data.bytes, data.length);

BIN
mediapipe/render/module/beauty/whiten.png Normal file → Executable file

Binary file not shown.

Before

Width:  |  Height:  |  Size: 331 KiB

After

Width:  |  Height:  |  Size: 157 KiB

View File

@ -42,8 +42,8 @@ namespace Opipe
pixelBuffer = packet.Get<mediapipe::Image>().GetCVPixelBufferRef();
graph->_delegate.lock()->outputPixelbuffer(graph, pixelBuffer, streamName, packet.Timestamp().Value());
#endif
}
#endif
}
@ -120,7 +120,7 @@ namespace Opipe
}
}
status = _graph->StartRun(_inputSidePackets, _streamHeaders);
NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
// NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
if (!status.ok())
{
return status;
@ -132,14 +132,14 @@ namespace Opipe
const std::string &streamName)
{
absl::Status status = _graph->AddPacketToInputStream(streamName, packet);
NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
// NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
return status.ok();
}
bool OlaGraph::movePacket(mediapipe::Packet &&packet, const std::string &streamName)
{
absl::Status status = _graph->AddPacketToInputStream(streamName, std::move(packet));
NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
// NSLog(@"errors:%@", [NSString stringWithUTF8String:status.ToString().c_str()]);
return status.ok();
}

View File

@ -36,7 +36,7 @@ namespace Opipe
#if defined(__APPLE__)
virtual void outputPixelbuffer(OlaGraph *graph, CVPixelBufferRef pixelbuffer,
const std::string &streamName,
int64_t timstamp) = 0;
int64_t timestamp) = 0;
#endif
@ -170,8 +170,17 @@ namespace Opipe
/// Waits for the graph to become idle.
bool waitUntilIdle();
void setUseVideoOutput(bool useVideoOutput) {
_useVideoOutput = useVideoOutput;
}
bool useVideoOutput() {
return _useVideoOutput;
}
std::weak_ptr<MPPGraphDelegate> _delegate;
std::atomic<int32_t> _framesInFlight = 0;
std::atomic<int32_t> _retryCount = 0;
private:
std::unique_ptr<mediapipe::CalculatorGraph> _graph;
@ -189,6 +198,7 @@ namespace Opipe
int64 _frameNumber;
bool _started;
bool _useVideoOutput = true;
absl::Status performStart();