diff --git a/mediapipe/calculators/tensor/image_to_tensor_converter_metal.cc b/mediapipe/calculators/tensor/image_to_tensor_converter_metal.cc
index f47d2da9a..55722e221 100644
--- a/mediapipe/calculators/tensor/image_to_tensor_converter_metal.cc
+++ b/mediapipe/calculators/tensor/image_to_tensor_converter_metal.cc
@@ -185,9 +185,34 @@ class SubRectExtractorMetal {
                        id<MTLCommandBuffer> command_buffer,
                        id<MTLBuffer> destination) {
     auto output_texture = MTLTextureWithBuffer(destination_size, destination);
-    return InternalExecute(input_texture, sub_rect, flip_horizontally, alpha,
-                           beta, destination_size, command_buffer,
-                           output_texture);
+    absl::Status status =
+        InternalExecute(input_texture, sub_rect, flip_horizontally, alpha, beta,
+                        destination_size, command_buffer, output_texture);
+// On the simulator, the `output_texture` cannot share its underlying storage
+// with the tensor's CPU buffer. Hence the contents of the `output_texture`
+// after sub-rect extraction must be copied to the tensor's CPU buffer held by
+// the `destination` buffer.
+#if TARGET_IPHONE_SIMULATOR
+    NSUInteger output_bytes_per_row =
+        GetBytesPerRaw(output_format_, destination_size);
+
+    id<MTLBlitCommandEncoder> blitCommandEncoder =
+        command_buffer.blitCommandEncoder;
+    [blitCommandEncoder copyFromTexture:output_texture
+                            sourceSlice:0
+                            sourceLevel:0
+                           sourceOrigin:MTLOriginMake(0, 0, 0)
+                             sourceSize:MTLSizeMake(output_texture.width,
+                                                    output_texture.height,
+                                                    output_texture.depth)
+                               toBuffer:destination
+                      destinationOffset:0
+                 destinationBytesPerRow:output_bytes_per_row
+               destinationBytesPerImage:0];
+    [blitCommandEncoder endEncoding];
+#endif
+
+    return status;
   }
 
  private:
@@ -201,11 +226,21 @@ class SubRectExtractorMetal {
     texture_desc.usage = MTLTextureUsageRenderTarget;
 
     NSUInteger output_bytes_per_row = GetBytesPerRaw(output_format_, size);
+    // Creating a no-copy `MTLTexture` from an `MTLBuffer` with
+    // MTLStorageModeShared on the simulator results in the following error:
+    // "Linear textures from shared buffers is not supported on this device."
+    // To mitigate this crash, an empty `MTLTexture` is created using the
+    // `MTLDevice`. This solution involves an extra copy of the texture's
+    // underlying buffer back to the tensor's CPU buffer.
+    id<MTLTexture> texture;
+#if TARGET_IPHONE_SIMULATOR
+    texture = [buffer.device newTextureWithDescriptor:texture_desc];
+#else
+    texture = [buffer newTextureWithDescriptor:texture_desc
+                                        offset:0
+                                   bytesPerRow:output_bytes_per_row];
+#endif
 
-    id<MTLTexture> texture =
-        [buffer newTextureWithDescriptor:texture_desc
-                                  offset:0
-                             bytesPerRow:output_bytes_per_row];
     return texture;
   }
 
diff --git a/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc b/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc
index 5983758f9..2324895f7 100644
--- a/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc
+++ b/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc
@@ -50,11 +50,38 @@ GlTextureView GpuBufferStorageCvPixelBuffer::GetTexture(
   const GlTextureInfo info = GlTextureInfoForGpuBufferFormat(
       format(), plane, gl_context->GetGlVersion());
   CVTextureType cv_texture_temp;
+
+// The current pixel buffer was created by `CVPixelBufferCreate` with the
+// attribute `kCVPixelBufferIOSurfacePropertiesKey` to ensure that it can be
+// used to create a `CVMetalTextureCache`. However, creating an OpenGL ES
+// texture from an IOSurface-backed pixel buffer crashes on the simulator. To
+// work around this, a new pixel buffer sharing the storage of the current
+// IOSurface-backed pixel buffer is created with `CVPixelBufferCreateWithBytes`
+// and is then used to create the OpenGL ES texture on the simulator. On the
+// device, OpenGL ES texture creation requires a pixel buffer with an IOSurface
+// and hence the current pixel buffer can be used directly.
+#if TARGET_IPHONE_SIMULATOR
+  CVPixelBufferRef simulator_pixel_buffer;
+  CVPixelBufferLockBaseAddress(**this, 0);
+  CVPixelBufferCreateWithBytes(
+      NULL, CVPixelBufferGetWidth(**this), CVPixelBufferGetHeight(**this),
+      CVPixelBufferGetPixelFormatType(**this),
+      CVPixelBufferGetBaseAddress(**this), CVPixelBufferGetBytesPerRow(**this),
+      NULL, NULL, NULL, &simulator_pixel_buffer);
+  CVPixelBufferUnlockBaseAddress(**this, 0);
+  err = CVOpenGLESTextureCacheCreateTextureFromImage(
+      kCFAllocatorDefault, gl_context->cv_texture_cache(),
+      simulator_pixel_buffer, NULL, GL_TEXTURE_2D, info.gl_internal_format,
+      width() / info.downscale, height() / info.downscale, info.gl_format,
+      info.gl_type, plane, &cv_texture_temp);
+#else
   err = CVOpenGLESTextureCacheCreateTextureFromImage(
       kCFAllocatorDefault, gl_context->cv_texture_cache(), **this, NULL,
       GL_TEXTURE_2D, info.gl_internal_format, width() / info.downscale,
       height() / info.downscale, info.gl_format, info.gl_type, plane,
       &cv_texture_temp);
+#endif
+
   ABSL_CHECK(cv_texture_temp && !err)
       << "CVOpenGLESTextureCacheCreateTextureFromImage failed: " << err;
   CFHolder<CVTextureType> cv_texture;
@@ -102,6 +129,7 @@ static void ViewDoneWritingSimulatorWorkaround(CVPixelBufferRef pixel_buffer,
     std::vector<uint8_t> contiguous_buffer(contiguous_bytes_per_row *
                                            view.height());
     uint8_t* temp_ptr = contiguous_buffer.data();
+    glPixelStorei(GL_PACK_ALIGNMENT, 4);
     glReadPixels(0, 0, view.width(), view.height(), GL_BGRA, GL_UNSIGNED_BYTE,
                  temp_ptr);
     for (int i = 0; i < view.height(); ++i) {
diff --git a/mediapipe/objc/util.cc b/mediapipe/objc/util.cc
index e5908f25a..65ef6b88f 100644
--- a/mediapipe/objc/util.cc
+++ b/mediapipe/objc/util.cc
@@ -251,25 +251,9 @@ static void FreeRefConReleaseCallback(void* refCon, const void* baseAddress) {
 CVReturn CreateCVPixelBufferWithoutPool(int width, int height,
                                         OSType cv_format,
                                         CVPixelBufferRef* out_buffer) {
-#if TARGET_IPHONE_SIMULATOR
-  // On the simulator, syncing the texture with the pixelbuffer does not work,
-  // and we have to use glReadPixels. Since GL_UNPACK_ROW_LENGTH is not
-  // available in OpenGL ES 2, we should create the buffer so the pixels are
-  // contiguous.
-  //
-  // TODO: verify if we can use kIOSurfaceBytesPerRow to force
-  // CoreVideo to give us contiguous data.
-  size_t bytes_per_row = width * 4;
-  void* data = malloc(bytes_per_row * height);
-  return CVPixelBufferCreateWithBytes(
-      kCFAllocatorDefault, width, height, cv_format, data, bytes_per_row,
-      FreeRefConReleaseCallback, data,
-      GetCVPixelBufferAttributesForGlCompatibility(), out_buffer);
-#else
   return CVPixelBufferCreate(kCFAllocatorDefault, width, height, cv_format,
                              GetCVPixelBufferAttributesForGlCompatibility(),
                              out_buffer);
-#endif
 }
 
 absl::StatusOr<CFHolder<CVPixelBufferRef>> CreateCVPixelBufferWithoutPool(
@@ -655,13 +639,16 @@ CFDictionaryRef GetCVPixelBufferAttributesForGlCompatibility() {
       kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks,
       &kCFTypeDictionaryValueCallBacks);
 
-  // To ensure compatibility with CVOpenGLESTextureCache, these attributes
-  // should be present. However, on simulator this IOSurface attribute
-  // actually causes CVOpenGLESTextureCache to fail. b/144850076
+  // To ensure compatibility with CVMetalTextureCache,
+  // kCVPixelBufferIOSurfacePropertiesKey must be present. To ensure
+  // compatibility with CVOpenGLESTextureCache, all of the listed property
+  // keys must be present. However, on the simulator this IOSurface attribute
+  // actually causes CVOpenGLESTextureCache to fail (b/144850076). The pixel
+  // buffer created with these attributes is therefore used with
+  // CVOpenGLESTextureCache only on the device; for the simulator, a
+  // different pixel buffer is created.
   const void* keys[] = {
-#if !TARGET_IPHONE_SIMULATOR
       kCVPixelBufferIOSurfacePropertiesKey,
-#endif  // !TARGET_IPHONE_SIMULATOR
 
 #if TARGET_OS_OSX
       kCVPixelFormatOpenGLCompatibility,
@@ -671,10 +658,8 @@ CFDictionaryRef GetCVPixelBufferAttributesForGlCompatibility() {
   };
 
   const void* values[] = {
-#if !TARGET_IPHONE_SIMULATOR
-      empty_dict,
-#endif  // !TARGET_IPHONE_SIMULATOR
-      kCFBooleanTrue
+      empty_dict,
+      kCFBooleanTrue,
   };
 
   attrs = CFDictionaryCreate(