diff --git a/.gitignore b/.gitignore
index 088ba6b..7eeb2b7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,4 @@ Cargo.lock
 # These are backup files generated by rustfmt
 **/*.rs.bk
+/refs/
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..0866e88
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "ux-mediapipe"
+version = "0.1.0"
+edition = "2018"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[lib]
+name = "mediapipe"
+
+[dependencies]
+cgmath = "0.18.0"
+libc = "0.2.0"
+opencv = {version = "0.63.0", default-features = false, features = ["videoio", "highgui", "imgproc"]}
+protobuf = "2.23.0"
diff --git a/README.md b/README.md
index c9de18d..dc1b2a5 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,4 @@
-# ux-media
+# ux-mediapipe
 Rust and mediapipe
+
+bazel build --define MEDIAPIPE_DISABLE_GPU=1 mediapipe:mediagraph
\ No newline at end of file
diff --git a/examples/hand_tracking_desktop_live_gpu.txt b/examples/hand_tracking_desktop_live_gpu.txt
new file mode 100644
index 0000000..4dcaac5
--- /dev/null
+++ b/examples/hand_tracking_desktop_live_gpu.txt
@@ -0,0 +1,48 @@
+# MediaPipe graph that performs multi-hand tracking with TensorFlow Lite on GPU.
+# Used in the examples in
+# mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu.
+
+# GPU image. (GpuBuffer)
+input_stream: "input_video"
+
+# GPU image. (GpuBuffer)
+output_stream: "output_video"
+# Collection of detected/predicted hands, each represented as a list of
+# landmarks. (std::vector<NormalizedLandmarkList>)
+output_stream: "hand_landmarks"
+
+# Generates side packet containing max number of hands to detect/track.
+node {
+  calculator: "ConstantSidePacketCalculator"
+  output_side_packet: "PACKET:num_hands"
+  node_options: {
+    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
+      packet { int_value: 2 }
+    }
+  }
+}
+
+# Detects/tracks hand landmarks.
+node {
+  calculator: "HandLandmarkTrackingGpu"
+  input_stream: "IMAGE:input_video"
+  input_side_packet: "NUM_HANDS:num_hands"
+  output_stream: "LANDMARKS:hand_landmarks"
+  output_stream: "HANDEDNESS:handedness"
+  output_stream: "PALM_DETECTIONS:palm_detections"
+  output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects_from_landmarks"
+  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
+}
+
+# Subgraph that renders annotations and overlays them on top of the input
+# images (see hand_renderer_gpu.pbtxt).
+node {
+  calculator: "HandRendererSubgraph"
+  input_stream: "IMAGE:input_video"
+  input_stream: "DETECTIONS:palm_detections"
+  input_stream: "LANDMARKS:hand_landmarks"
+  input_stream: "HANDEDNESS:handedness"
+  input_stream: "NORM_RECTS:0:hand_rects_from_palm_detections"
+  input_stream: "NORM_RECTS:1:hand_rects_from_landmarks"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/examples/hello.rs b/examples/hello.rs
new file mode 100644
index 0000000..327e42e
--- /dev/null
+++ b/examples/hello.rs
@@ -0,0 +1,212 @@
+#![allow(unused_variables)]
+#![allow(dead_code)]
+
+use mediapipe::*;
+
+mod examples {
+    use super::*;
+    use opencv::prelude::*;
+    use opencv::{highgui, imgproc, videoio, Result};
+
+    pub fn corner_rectangle() -> Result<()> {
+        let window = "video capture";
+
+        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
+
+        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
+        if !cap.is_opened()?
{ + panic!("Unable to open default cam") + } + + let detector = hands::HandDetector::default(); + + loop { + let mut frame = Mat::default(); + cap.read(&mut frame)?; + let size = frame.size()?; + if size.width > 0 { + highgui::imshow(window, &mut frame)? + } + let key = highgui::wait_key(10)?; + if key > 0 && key != 255 { + break; + } + } + Ok(()) + } + + // pub fn face_detection() -> Result<()> { + // let window = "video capture"; + + // highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?; + + // let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?; + // if !cap.is_opened()? { + // panic!("Unable to open default cam") + // } + + // let detector = mediapipe::face_detection::FaceDetector::default(); + + // loop { + // let mut frame = Mat::default(); + // cap.read(&mut frame)?; + // let size = frame.size()?; + // if size.width > 0 { + // highgui::imshow(window, &mut frame)? + // } + // let key = highgui::wait_key(10)?; + // if key > 0 && key != 255 { + // break; + // } + // } + // Ok(()) + // } + + pub fn face_mesh() -> Result<()> { + let window = "video capture"; + + highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?; + + let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?; + if !cap.is_opened()? { + panic!("Unable to open default cam") + } + + cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?; + cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?; + cap.set(videoio::CAP_PROP_FPS, 30.0)?; + + let mut mesh = FaceMesh::default(); + let mut detector = face_mesh::FaceMeshDetector::default(); + + let mut raw_frame = Mat::default(); + let mut rgb_frame = Mat::default(); + let mut flip_frame = Mat::default(); + loop { + cap.read(&mut raw_frame)?; + + let size = raw_frame.size()?; + if size.width > 0 && !raw_frame.empty() { + imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?; + opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal + + detector.process(&flip_frame, &mut mesh); + + highgui::imshow(window, &mut flip_frame)?; + println!( + "LANDMARK: {} {} {}", + mesh.data[0].x, mesh.data[0].y, mesh.data[0].z + ); + } else { + println!("WARN: Skip empty frame"); + } + + let key = highgui::wait_key(10)?; + if key > 0 && key != 255 { + break; + } + } + Ok(()) + } + + pub fn hand_tracking() -> Result<()> { + let window = "video capture"; + + highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?; + + let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?; + if !cap.is_opened()? 
{
+            panic!("Unable to open default cam")
+        }
+
+        cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
+        cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
+        cap.set(videoio::CAP_PROP_FPS, 30.0)?;
+
+        let mut left = Hand::default();
+        let mut right = Hand::default();
+        let mut detector = hands::HandDetector::default();
+
+        let mut raw_frame = Mat::default();
+        let mut rgb_frame = Mat::default();
+        let mut flip_frame = Mat::default();
+        loop {
+            cap.read(&mut raw_frame)?;
+
+            let size = raw_frame.size()?;
+            if size.width > 0 && !raw_frame.empty() {
+                imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
+                opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
+
+                detector.process(&flip_frame, &mut left, &mut right);
+
+                highgui::imshow(window, &mut flip_frame)?;
+                println!(
+                    "LANDMARK: {} {} {}",
+                    left.data[0].x, left.data[0].y, left.data[0].z
+                );
+            } else {
+                println!("WARN: Skip empty frame");
+            }
+
+            let key = highgui::wait_key(10)?;
+            if key > 0 && key != 255 {
+                break;
+            }
+        }
+        Ok(())
+    }
+
+    pub fn pose_estimation() -> Result<()> {
+        let window = "video capture";
+
+        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
+
+        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
+        if !cap.is_opened()? {
+            panic!("Unable to open default cam")
+        }
+
+        cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
+        cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
+        cap.set(videoio::CAP_PROP_FPS, 30.0)?;
+
+        let mut pose = Pose::default();
+        let mut detector = pose::PoseDetector::default();
+
+        let mut raw_frame = Mat::default();
+        let mut rgb_frame = Mat::default();
+        let mut flip_frame = Mat::default();
+        loop {
+            cap.read(&mut raw_frame)?;
+
+            let size = raw_frame.size()?;
+            if size.width > 0 && !raw_frame.empty() {
+                imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
+                opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
+
+                detector.process(&flip_frame, &mut pose);
+
+                highgui::imshow(window, &mut flip_frame)?;
+                println!(
+                    "LANDMARK: {} {} {}",
+                    pose.data[0].x, pose.data[0].y, pose.data[0].z
+                );
+            } else {
+                println!("WARN: Skip empty frame");
+            }
+
+            let key = highgui::wait_key(10)?;
+            if key > 0 && key != 255 {
+                break;
+            }
+        }
+        Ok(())
+    }
+}
+
+fn main() {
+    // examples::pose_estimation().unwrap()
+    // examples::hand_tracking().unwrap()
+    examples::face_mesh().unwrap()
+}
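The three live-capture examples above repeat the same OpenCV loop: open the default camera, convert BGR to RGB, mirror the frame, run a detector, show the result. The shared skeleton could be factored out as sketched below; `run_capture_loop` is a hypothetical helper, not part of this patch, and it assumes only the opencv crate features already enabled in Cargo.toml.

```rust
// Hypothetical consolidation of the capture loop shared by the examples
// above (not part of this patch). Converts BGR -> RGB, mirrors the frame,
// hands it to `on_frame`, then displays it. Any keypress exits.
use opencv::prelude::*;
use opencv::{highgui, imgproc, videoio, Result};

pub fn run_capture_loop<F: FnMut(&Mat)>(window: &str, mut on_frame: F) -> Result<()> {
    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

    let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
    if !cap.is_opened()? {
        panic!("Unable to open default cam")
    }
    cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
    cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
    cap.set(videoio::CAP_PROP_FPS, 30.0)?;

    let mut raw_frame = Mat::default();
    let mut rgb_frame = Mat::default();
    let mut flip_frame = Mat::default();
    loop {
        cap.read(&mut raw_frame)?;

        if raw_frame.size()?.width > 0 && !raw_frame.empty() {
            imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
            opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal mirror

            on_frame(&flip_frame); // e.g. detector.process(frame, &mut mesh)

            highgui::imshow(window, &flip_frame)?;
        } else {
            println!("WARN: Skip empty frame");
        }

        let key = highgui::wait_key(10)?;
        if key > 0 && key != 255 {
            break;
        }
    }
    Ok(())
}
```

With such a helper, `face_mesh`, `hand_tracking`, and `pose_estimation` would differ only in the detector state they capture in the closure and the landmark they print.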
diff --git a/mediapipe/graphs/edge_detection/BUILD b/mediapipe/graphs/edge_detection/BUILD
new file mode 100644
index 0000000..fac2411
--- /dev/null
+++ b/mediapipe/graphs/edge_detection/BUILD
@@ -0,0 +1,36 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+    "//mediapipe/framework/tool:mediapipe_graph.bzl",
+    "mediapipe_binary_graph",
+)
+
+licenses(["notice"])
+
+package(default_visibility = ["//visibility:public"])
+
+cc_library(
+    name = "mobile_calculators",
+    deps = [
+        "//mediapipe/calculators/image:luminance_calculator",
+        "//mediapipe/calculators/image:sobel_edges_calculator",
+    ],
+)
+
+mediapipe_binary_graph(
+    name = "mobile_gpu_binary_graph",
+    graph = "edge_detection_mobile_gpu.pbtxt",
+    output_name = "mobile_gpu.binarypb",
+)
diff --git a/mediapipe/graphs/edge_detection/edge_detection_mobile_gpu.pbtxt b/mediapipe/graphs/edge_detection/edge_detection_mobile_gpu.pbtxt
new file mode 100644
index 0000000..e3c572e
--- /dev/null
+++ b/mediapipe/graphs/edge_detection/edge_detection_mobile_gpu.pbtxt
@@ -0,0 +1,22 @@
+# MediaPipe graph that performs GPU Sobel edge detection on a live video stream.
+# Used in the examples in
+# mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic:helloworld
+# and mediapipe/examples/ios/helloworld.
+
+# Images coming into and out of the graph.
+input_stream: "input_video"
+output_stream: "output_video"
+
+# Converts RGB images into luminance images, still stored in RGB format.
+node: {
+  calculator: "LuminanceCalculator"
+  input_stream: "input_video"
+  output_stream: "luma_video"
+}
+
+# Applies the Sobel filter to luminance images stored in RGB format.
+node: {
+  calculator: "SobelEdgesCalculator"
+  input_stream: "luma_video"
+  output_stream: "output_video"
+}
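For intuition, the two calculators in the graph above map onto familiar image operations. A rough CPU analogue using the opencv crate from Cargo.toml (a sketch, not the GPU calculators themselves):

```rust
// Rough CPU analogue of the two-node edge detection graph:
// LuminanceCalculator ~ a grayscale conversion,
// SobelEdgesCalculator ~ a Sobel filter over the luminance image.
use opencv::prelude::*;
use opencv::{core, imgproc, Result};

fn sobel_edges(bgr: &Mat) -> Result<Mat> {
    // Convert the camera frame into a single-channel luminance image.
    let mut luma = Mat::default();
    imgproc::cvt_color(bgr, &mut luma, imgproc::COLOR_BGR2GRAY, 0)?;

    // Apply the Sobel filter to the luminance image.
    let mut edges = Mat::default();
    imgproc::sobel(
        &luma,
        &mut edges,
        core::CV_8U, // output depth
        1,           // d/dx order
        0,           // d/dy order
        3,           // 3x3 kernel
        1.0,         // no scaling
        0.0,         // no offset
        core::BORDER_DEFAULT,
    )?;
    Ok(edges)
}
```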
diff --git a/mediapipe/graphs/face_detection/BUILD b/mediapipe/graphs/face_detection/BUILD
new file mode 100644
index 0000000..9e7cf25
--- /dev/null
+++ b/mediapipe/graphs/face_detection/BUILD
@@ -0,0 +1,95 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+    "//mediapipe/framework/tool:mediapipe_graph.bzl",
+    "mediapipe_binary_graph",
+)
+
+licenses(["notice"])
+
+package(default_visibility = ["//visibility:public"])
+
+cc_library(
+    name = "mobile_calculators",
+    deps = [
+        "//mediapipe/calculators/core:flow_limiter_calculator",
+        "//mediapipe/calculators/util:annotation_overlay_calculator",
+        "//mediapipe/calculators/util:detections_to_render_data_calculator",
+        "//mediapipe/gpu:gpu_buffer_to_image_frame_calculator",
+        "//mediapipe/gpu:image_frame_to_gpu_buffer_calculator",
+        "//mediapipe/modules/face_detection:face_detection_short_range_cpu",
+        "//mediapipe/modules/face_detection:face_detection_short_range_gpu",
+    ],
+)
+
+cc_library(
+    name = "desktop_live_calculators",
+    deps = [
+        "//mediapipe/calculators/core:flow_limiter_calculator",
+        "//mediapipe/calculators/util:annotation_overlay_calculator",
+        "//mediapipe/calculators/util:detections_to_render_data_calculator",
+        "//mediapipe/modules/face_detection:face_detection_short_range_cpu",
+    ],
+)
+
+cc_library(
+    name = "desktop_live_gpu_calculators",
+    deps = [
+        "//mediapipe/calculators/core:flow_limiter_calculator",
+        "//mediapipe/calculators/util:annotation_overlay_calculator",
+        "//mediapipe/calculators/util:detections_to_render_data_calculator",
+        "//mediapipe/modules/face_detection:face_detection_short_range_gpu",
+    ],
+)
+
+mediapipe_binary_graph(
+    name = "face_detection_mobile_cpu_binary_graph",
+    graph = "face_detection_mobile_cpu.pbtxt",
+    output_name = "face_detection_mobile_cpu.binarypb",
+    deps = [":mobile_calculators"],
+)
+
+mediapipe_binary_graph(
+    name = "face_detection_mobile_gpu_binary_graph",
+    graph = "face_detection_mobile_gpu.pbtxt",
+    output_name = "face_detection_mobile_gpu.binarypb",
+    deps = [":mobile_calculators"],
+)
+
+cc_library(
+    name = "face_detection_full_range_mobile_gpu_deps",
+    deps = [
+        "//mediapipe/calculators/core:flow_limiter_calculator",
+        "//mediapipe/calculators/util:annotation_overlay_calculator",
+        "//mediapipe/calculators/util:detections_to_render_data_calculator",
+        "//mediapipe/modules/face_detection:face_detection_full_range_gpu",
+    ],
+)
+
+mediapipe_binary_graph(
+    name = "face_detection_full_range_mobile_gpu_binary_graph",
+    graph = "face_detection_full_range_mobile_gpu.pbtxt",
+    output_name = "face_detection_full_range_mobile_gpu.binarypb",
+    deps = [":face_detection_full_range_mobile_gpu_deps"],
+)
+
+cc_library(
+    name = "face_detection_full_range_desktop_live_deps",
+    deps = [
+        "//mediapipe/calculators/core:flow_limiter_calculator",
+        "//mediapipe/calculators/util:annotation_overlay_calculator",
+        "//mediapipe/calculators/util:detections_to_render_data_calculator",
+        "//mediapipe/modules/face_detection:face_detection_full_range_cpu",
+    ],
+)
diff --git a/mediapipe/graphs/face_detection/face_detection_desktop_live.pbtxt b/mediapipe/graphs/face_detection/face_detection_desktop_live.pbtxt
new file mode 100644
index 0000000..023bea9
--- /dev/null
+++ b/mediapipe/graphs/face_detection/face_detection_desktop_live.pbtxt
@@ -0,0 +1,58 @@
+# MediaPipe graph that performs face detection with TensorFlow Lite on CPU.
+
+# CPU buffer. (ImageFrame)
+input_stream: "input_video"
+
+# Output image with rendered results. (ImageFrame)
+output_stream: "output_video"
+# Detected faces. (std::vector<Detection>)
+output_stream: "face_detections"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that come in while waiting are
+# dropped, limiting the number of in-flight images in most parts of the graph
+# to 1. This prevents the downstream nodes from queuing up incoming images and
+# data excessively, which leads to increased latency and memory usage, unwanted
+# in real-time mobile applications. It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Subgraph that detects faces.
+node {
+  calculator: "FaceDetectionShortRangeCpu"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "DETECTIONS:face_detections"
+}
+
+# Converts the detections to drawing primitives for annotation overlay.
+node {
+  calculator: "DetectionsToRenderDataCalculator"
+  input_stream: "DETECTIONS:face_detections"
+  output_stream: "RENDER_DATA:render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
+      thickness: 4.0
+      color { r: 255 g: 0 b: 0 }
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE:throttled_input_video"
+  input_stream: "render_data"
+  output_stream: "IMAGE:output_video"
+}
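The FlowLimiterCalculator comment above describes back-edge throttling: at most one frame is in flight, and frames arriving while a result is pending are dropped. A toy illustration of that idea in plain Rust (not MediaPipe's implementation):

```rust
// Toy model of the FlowLimiterCalculator's throttling scheme. A frame
// on "input_video" is admitted only when no earlier frame is still
// being processed; the FINISHED back edge is modeled by `finished()`.
struct FlowLimiter {
    in_flight: bool,
}

impl FlowLimiter {
    fn new() -> Self {
        Self { in_flight: false }
    }

    /// A frame arrives on "input_video": returns true if it may pass
    /// through to "throttled_input_video", false if it is dropped.
    fn admit(&mut self) -> bool {
        if self.in_flight {
            false // downstream still busy: drop the frame
        } else {
            self.in_flight = true;
            true
        }
    }

    /// A packet arrives on the FINISHED back edge ("output_video"
    /// looping back): downstream is idle again.
    fn finished(&mut self) {
        self.in_flight = false;
    }
}
```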
diff --git a/mediapipe/graphs/face_detection/face_detection_full_range_desktop_live.pbtxt b/mediapipe/graphs/face_detection/face_detection_full_range_desktop_live.pbtxt
new file mode 100644
index 0000000..0fdb6b9
--- /dev/null
+++ b/mediapipe/graphs/face_detection/face_detection_full_range_desktop_live.pbtxt
@@ -0,0 +1,60 @@
+# MediaPipe graph that performs face detection with TensorFlow Lite on CPU.
+# Used in the examples in
+# mediapipe/examples/desktop/face_detection:face_detection_cpu.
+
+# Images coming into and out of the graph.
+input_stream: "input_video"
+output_stream: "output_video"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for
+# TfLiteTensorsToDetectionsCalculator downstream in the graph to finish
+# generating the corresponding detections before it passes through another
+# image. All images that come in while waiting are dropped, limiting the number
+# of in-flight images between this calculator and
+# TfLiteTensorsToDetectionsCalculator to 1. This prevents the nodes in between
+# from queuing up incoming images and data excessively, which leads to increased
+# latency and memory usage, unwanted in real-time mobile applications. It also
+# eliminates unnecessary computation, e.g., a transformed image produced by
+# ImageTransformationCalculator may get dropped downstream if the subsequent
+# TfLiteConverterCalculator or TfLiteInferenceCalculator is still busy
+# processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:detections"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Detects faces.
+node {
+  calculator: "FaceDetectionFullRangeCpu"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "DETECTIONS:detections"
+}
+
+# Converts the detections to drawing primitives for annotation overlay.
+node {
+  calculator: "DetectionsToRenderDataCalculator"
+  input_stream: "DETECTIONS:detections"
+  output_stream: "RENDER_DATA:render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
+      thickness: 4.0
+      color { r: 255 g: 0 b: 0 }
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE:throttled_input_video"
+  input_stream: "render_data"
+  output_stream: "IMAGE:output_video"
+}
+
diff --git a/mediapipe/graphs/face_detection/face_detection_full_range_mobile_gpu.pbtxt b/mediapipe/graphs/face_detection/face_detection_full_range_mobile_gpu.pbtxt
new file mode 100644
index 0000000..8224543
--- /dev/null
+++ b/mediapipe/graphs/face_detection/face_detection_full_range_mobile_gpu.pbtxt
@@ -0,0 +1,60 @@
+# MediaPipe graph that performs face detection with TensorFlow Lite on GPU.
+# Used in the examples in
+# mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu and
+# mediapipe/examples/ios/facedetectiongpu.
+
+# Images on GPU coming into and out of the graph.
+input_stream: "input_video"
+output_stream: "output_video"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for
+# TfLiteTensorsToDetectionsCalculator downstream in the graph to finish
+# generating the corresponding detections before it passes through another
+# image. All images that come in while waiting are dropped, limiting the number
+# of in-flight images between this calculator and
+# TfLiteTensorsToDetectionsCalculator to 1. This prevents the nodes in between
+# from queuing up incoming images and data excessively, which leads to increased
+# latency and memory usage, unwanted in real-time mobile applications. It also
+# eliminates unnecessary computation, e.g., a transformed image produced by
+# ImageTransformationCalculator may get dropped downstream if the subsequent
+# TfLiteConverterCalculator or TfLiteInferenceCalculator is still busy
+# processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Detects faces.
+node {
+  calculator: "FaceDetectionFullRangeGpu"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "DETECTIONS:detections"
+}
+
+# Converts the detections to drawing primitives for annotation overlay.
+node {
+  calculator: "DetectionsToRenderDataCalculator"
+  input_stream: "DETECTIONS:detections"
+  output_stream: "RENDER_DATA:render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
+      thickness: 4.0
+      color { r: 255 g: 0 b: 0 }
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE_GPU:throttled_input_video"
+  input_stream: "render_data"
+  output_stream: "IMAGE_GPU:output_video"
+}
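The DetectionsToRenderDataCalculator options used throughout these graphs (thickness 4.0, red) amount to drawing each detection's bounding box onto the frame. A sketch of the equivalent drawing step with the opencv crate; `Det` is a hypothetical stand-in for a decoded MediaPipe detection with a relative bounding box:

```rust
// Hypothetical sketch of what the render-data options configure:
// draw each detection's bounding box with thickness 4 in red.
use opencv::prelude::*;
use opencv::{core, imgproc, Result};

struct Det {
    // Relative coordinates in [0, 1], as in a normalized bounding box.
    xmin: f32,
    ymin: f32,
    width: f32,
    height: f32,
}

fn overlay(frame: &mut Mat, detections: &[Det]) -> Result<()> {
    let size = frame.size()?;
    // The proto color is RGB {r: 255}; with OpenCV's usual BGR Mats,
    // red is Scalar(0, 0, 255).
    let red = core::Scalar::new(0.0, 0.0, 255.0, 0.0);
    for d in detections {
        let rect = core::Rect::new(
            (d.xmin * size.width as f32) as i32,
            (d.ymin * size.height as f32) as i32,
            (d.width * size.width as f32) as i32,
            (d.height * size.height as f32) as i32,
        );
        imgproc::rectangle(frame, rect, red, 4, imgproc::LINE_8, 0)?;
    }
    Ok(())
}
```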
diff --git a/mediapipe/graphs/face_detection/face_detection_mobile_cpu.pbtxt b/mediapipe/graphs/face_detection/face_detection_mobile_cpu.pbtxt
new file mode 100644
index 0000000..681d2db
--- /dev/null
+++ b/mediapipe/graphs/face_detection/face_detection_mobile_cpu.pbtxt
@@ -0,0 +1,76 @@
+# MediaPipe graph that performs face detection with TensorFlow Lite on CPU.
+
+# GPU buffer. (GpuBuffer)
+input_stream: "input_video"
+
+# Output image with rendered results. (GpuBuffer)
+output_stream: "output_video"
+# Detected faces. (std::vector<Detection>)
+output_stream: "face_detections"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that come in while waiting are
+# dropped, limiting the number of in-flight images in most parts of the graph
+# to 1. This prevents the downstream nodes from queuing up incoming images and
+# data excessively, which leads to increased latency and memory usage, unwanted
+# in real-time mobile applications. It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Transfers the input image from GPU to CPU memory for the purpose of
+# demonstrating a CPU-based pipeline. Note that the input image on GPU has the
+# origin defined at the bottom-left corner (OpenGL convention). As a result,
+# the transferred image on CPU also shares the same representation.
+node: {
+  calculator: "GpuBufferToImageFrameCalculator"
+  input_stream: "throttled_input_video"
+  output_stream: "input_video_cpu"
+}
+
+# Subgraph that detects faces.
+node {
+  calculator: "FaceDetectionShortRangeCpu"
+  input_stream: "IMAGE:input_video_cpu"
+  output_stream: "DETECTIONS:face_detections"
+}
+
+# Converts the detections to drawing primitives for annotation overlay.
+node {
+  calculator: "DetectionsToRenderDataCalculator"
+  input_stream: "DETECTIONS:face_detections"
+  output_stream: "RENDER_DATA:render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
+      thickness: 4.0
+      color { r: 255 g: 0 b: 0 }
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE:input_video_cpu"
+  input_stream: "render_data"
+  output_stream: "IMAGE:output_video_cpu"
+}
+
+# Transfers the annotated image from CPU back to GPU memory, to be sent out of
+# the graph.
+node: {
+  calculator: "ImageFrameToGpuBufferCalculator"
+  input_stream: "output_video_cpu"
+  output_stream: "output_video"
+}
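On the bottom-left origin noted above: images transferred from the GPU keep the OpenGL convention, so if such a frame ever needs OpenCV's usual top-left convention on the CPU side, a vertical flip converts between the two. A minimal sketch with the opencv crate:

```rust
// Minimal sketch: convert between a bottom-left-origin image (OpenGL
// convention, as produced by GpuBufferToImageFrameCalculator) and the
// top-left-origin convention OpenCV code usually assumes.
use opencv::prelude::*;
use opencv::{core, Result};

fn to_top_left_origin(bottom_left: &Mat) -> Result<Mat> {
    let mut top_left = Mat::default();
    core::flip(bottom_left, &mut top_left, 0)?; // 0 = flip around the x-axis
    Ok(top_left)
}
```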
diff --git a/mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt b/mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt
new file mode 100644
index 0000000..d235d1c
--- /dev/null
+++ b/mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt
@@ -0,0 +1,58 @@
+# MediaPipe graph that performs face detection with TensorFlow Lite on GPU.
+
+# GPU buffer. (GpuBuffer)
+input_stream: "input_video"
+
+# Output image with rendered results. (GpuBuffer)
+output_stream: "output_video"
+# Detected faces. (std::vector<Detection>)
+output_stream: "face_detections"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that come in while waiting are
+# dropped, limiting the number of in-flight images in most parts of the graph
+# to 1. This prevents the downstream nodes from queuing up incoming images and
+# data excessively, which leads to increased latency and memory usage, unwanted
+# in real-time mobile applications. It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Subgraph that detects faces.
+node {
+  calculator: "FaceDetectionShortRangeGpu"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "DETECTIONS:face_detections"
+}
+
+# Converts the detections to drawing primitives for annotation overlay.
+node {
+  calculator: "DetectionsToRenderDataCalculator"
+  input_stream: "DETECTIONS:face_detections"
+  output_stream: "RENDER_DATA:render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
+      thickness: 4.0
+      color { r: 255 g: 0 b: 0 }
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE_GPU:throttled_input_video"
+  input_stream: "render_data"
+  output_stream: "IMAGE_GPU:output_video"
+}
diff --git a/mediapipe/graphs/face_effect/BUILD b/mediapipe/graphs/face_effect/BUILD
new file mode 100644
index 0000000..69d648e
--- /dev/null
+++ b/mediapipe/graphs/face_effect/BUILD
@@ -0,0 +1,44 @@
+# Copyright 2020 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "face_effect_gpu_deps", + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:immediate_mux_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/framework/tool:switch_container", + "//mediapipe/graphs/face_effect/subgraphs:single_face_geometry_from_detection_gpu", + "//mediapipe/graphs/face_effect/subgraphs:single_face_geometry_from_landmarks_gpu", + "//mediapipe/modules/face_geometry:effect_renderer_calculator", + "//mediapipe/modules/face_geometry:env_generator_calculator", + ], +) + +mediapipe_binary_graph( + name = "face_effect_gpu_binary_graph", + graph = "face_effect_gpu.pbtxt", + output_name = "face_effect_gpu.binarypb", + deps = [":face_effect_gpu_deps"], +) diff --git a/mediapipe/graphs/face_effect/data/BUILD b/mediapipe/graphs/face_effect/data/BUILD new file mode 100644 index 0000000..9993699 --- /dev/null +++ b/mediapipe/graphs/face_effect/data/BUILD @@ -0,0 +1,47 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework:encode_binary_proto.bzl", "encode_binary_proto") + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +encode_binary_proto( + name = "axis", + input = "axis.pbtxt", + message_type = "mediapipe.face_geometry.Mesh3d", + output = "axis.binarypb", + deps = [ + "//mediapipe/modules/face_geometry/protos:mesh_3d_proto", + ], +) + +encode_binary_proto( + name = "glasses", + input = "glasses.pbtxt", + message_type = "mediapipe.face_geometry.Mesh3d", + output = "glasses.binarypb", + deps = [ + "//mediapipe/modules/face_geometry/protos:mesh_3d_proto", + ], +) + +# `.pngblob` is used instead of `.png` to prevent iOS build from preprocessing the image. +# OpenCV is unable to read a PNG file preprocessed by the iOS build. 
+exports_files([ + "axis.pngblob", + "facepaint.pngblob", + "glasses.pngblob", +]) diff --git a/mediapipe/graphs/face_effect/data/axis.pbtxt b/mediapipe/graphs/face_effect/data/axis.pbtxt new file mode 100644 index 0000000..6a3fd52 --- /dev/null +++ b/mediapipe/graphs/face_effect/data/axis.pbtxt @@ -0,0 +1,320 @@ +vertex_type: VERTEX_PT +primitive_type: TRIANGLE +vertex_buffer: -0.100000 +vertex_buffer: -0.100000 +vertex_buffer: 11.500000 +vertex_buffer: 0.873006 +vertex_buffer: 1.000000 +vertex_buffer: 0.100000 +vertex_buffer: -0.100000 +vertex_buffer: 11.500000 +vertex_buffer: 0.928502 +vertex_buffer: 1.000000 +vertex_buffer: 0.100000 +vertex_buffer: 0.100000 +vertex_buffer: 11.500000 +vertex_buffer: 0.928502 +vertex_buffer: 0.750000 +vertex_buffer: -0.100000 +vertex_buffer: 0.100000 +vertex_buffer: 11.500000 +vertex_buffer: 0.873006 +vertex_buffer: 0.750000 +vertex_buffer: 0.100000 +vertex_buffer: 0.100000 +vertex_buffer: 8.500000 +vertex_buffer: 0.928502 +vertex_buffer: 0.500000 +vertex_buffer: -0.100000 +vertex_buffer: 0.100000 +vertex_buffer: 8.500000 +vertex_buffer: 0.873006 +vertex_buffer: 0.500000 +vertex_buffer: 0.100000 +vertex_buffer: -0.100000 +vertex_buffer: 8.500000 +vertex_buffer: 0.928502 +vertex_buffer: 0.250000 +vertex_buffer: -0.100000 +vertex_buffer: -0.100000 +vertex_buffer: 8.500000 +vertex_buffer: 0.873006 +vertex_buffer: 0.250000 +vertex_buffer: 0.100000 +vertex_buffer: -0.100000 +vertex_buffer: 11.500000 +vertex_buffer: 0.928502 +vertex_buffer: 0.000000 +vertex_buffer: -0.100000 +vertex_buffer: -0.100000 +vertex_buffer: 11.500000 +vertex_buffer: 0.873006 +vertex_buffer: 0.000000 +vertex_buffer: 0.100000 +vertex_buffer: -0.100000 +vertex_buffer: 8.500000 +vertex_buffer: 0.983999 +vertex_buffer: 1.000000 +vertex_buffer: 0.100000 +vertex_buffer: 0.100000 +vertex_buffer: 8.500000 +vertex_buffer: 0.983999 +vertex_buffer: 0.750000 +vertex_buffer: -0.100000 +vertex_buffer: -0.100000 +vertex_buffer: 8.500000 +vertex_buffer: 0.817509 +vertex_buffer: 1.000000 +vertex_buffer: -0.100000 +vertex_buffer: 0.100000 +vertex_buffer: 8.500000 +vertex_buffer: 0.817509 +vertex_buffer: 0.750000 +vertex_buffer: 3.000000 +vertex_buffer: -0.100000 +vertex_buffer: 8.600000 +vertex_buffer: 0.069341 +vertex_buffer: 1.000000 +vertex_buffer: 3.000000 +vertex_buffer: -0.100000 +vertex_buffer: 8.400000 +vertex_buffer: 0.123429 +vertex_buffer: 1.000000 +vertex_buffer: 3.000000 +vertex_buffer: 0.100000 +vertex_buffer: 8.400000 +vertex_buffer: 0.123429 +vertex_buffer: 0.750000 +vertex_buffer: 3.000000 +vertex_buffer: 0.100000 +vertex_buffer: 8.600000 +vertex_buffer: 0.069341 +vertex_buffer: 0.750000 +vertex_buffer: 0.000000 +vertex_buffer: 0.100000 +vertex_buffer: 8.400000 +vertex_buffer: 0.123419 +vertex_buffer: 0.499992 +vertex_buffer: 0.000000 +vertex_buffer: 0.100000 +vertex_buffer: 8.600000 +vertex_buffer: 0.069341 +vertex_buffer: 0.500000 +vertex_buffer: 0.000000 +vertex_buffer: -0.100000 +vertex_buffer: 8.400000 +vertex_buffer: 0.123429 +vertex_buffer: 0.250000 +vertex_buffer: 0.000000 +vertex_buffer: -0.100000 +vertex_buffer: 8.600000 +vertex_buffer: 0.069341 +vertex_buffer: 0.250000 +vertex_buffer: 3.000000 +vertex_buffer: -0.100000 +vertex_buffer: 8.400000 +vertex_buffer: 0.123429 +vertex_buffer: 0.000000 +vertex_buffer: 3.000000 +vertex_buffer: -0.100000 +vertex_buffer: 8.600000 +vertex_buffer: 0.069341 +vertex_buffer: 0.000000 +vertex_buffer: 0.000000 +vertex_buffer: -0.100000 +vertex_buffer: 8.400000 +vertex_buffer: 0.177516 +vertex_buffer: 1.000000 +vertex_buffer: 0.000000 
+vertex_buffer: 0.100000 +vertex_buffer: 8.400000 +vertex_buffer: 0.177516 +vertex_buffer: 0.750000 +vertex_buffer: 0.000000 +vertex_buffer: -0.100000 +vertex_buffer: 8.600000 +vertex_buffer: 0.015254 +vertex_buffer: 1.000000 +vertex_buffer: 0.000000 +vertex_buffer: 0.100000 +vertex_buffer: 8.600000 +vertex_buffer: 0.015254 +vertex_buffer: 0.750000 +vertex_buffer: -0.100000 +vertex_buffer: 0.000000 +vertex_buffer: 8.400000 +vertex_buffer: 0.472252 +vertex_buffer: 1.000000 +vertex_buffer: 0.100000 +vertex_buffer: 0.000000 +vertex_buffer: 8.400000 +vertex_buffer: 0.527748 +vertex_buffer: 1.000000 +vertex_buffer: 0.100000 +vertex_buffer: 0.000000 +vertex_buffer: 8.600000 +vertex_buffer: 0.527748 +vertex_buffer: 0.750000 +vertex_buffer: -0.100000 +vertex_buffer: 0.000000 +vertex_buffer: 8.600000 +vertex_buffer: 0.472252 +vertex_buffer: 0.750000 +vertex_buffer: 0.100000 +vertex_buffer: 3.000000 +vertex_buffer: 8.600000 +vertex_buffer: 0.527748 +vertex_buffer: 0.500000 +vertex_buffer: -0.100000 +vertex_buffer: 3.000000 +vertex_buffer: 8.600000 +vertex_buffer: 0.472252 +vertex_buffer: 0.500000 +vertex_buffer: 0.100000 +vertex_buffer: 3.000000 +vertex_buffer: 8.400000 +vertex_buffer: 0.527748 +vertex_buffer: 0.250000 +vertex_buffer: -0.100000 +vertex_buffer: 3.000000 +vertex_buffer: 8.400000 +vertex_buffer: 0.472252 +vertex_buffer: 0.250000 +vertex_buffer: 0.100000 +vertex_buffer: 0.000000 +vertex_buffer: 8.400000 +vertex_buffer: 0.527748 +vertex_buffer: 0.000000 +vertex_buffer: -0.100000 +vertex_buffer: 0.000000 +vertex_buffer: 8.400000 +vertex_buffer: 0.472252 +vertex_buffer: 0.000000 +vertex_buffer: 0.100000 +vertex_buffer: 3.000000 +vertex_buffer: 8.400000 +vertex_buffer: 0.583245 +vertex_buffer: 1.000000 +vertex_buffer: 0.100000 +vertex_buffer: 3.000000 +vertex_buffer: 8.600000 +vertex_buffer: 0.583245 +vertex_buffer: 0.750000 +vertex_buffer: -0.100000 +vertex_buffer: 3.000000 +vertex_buffer: 8.400000 +vertex_buffer: 0.416755 +vertex_buffer: 1.000000 +vertex_buffer: -0.100000 +vertex_buffer: 3.000000 +vertex_buffer: 8.600000 +vertex_buffer: 0.416755 +vertex_buffer: 0.750000 +index_buffer: 0 +index_buffer: 1 +index_buffer: 2 +index_buffer: 0 +index_buffer: 2 +index_buffer: 3 +index_buffer: 3 +index_buffer: 2 +index_buffer: 4 +index_buffer: 3 +index_buffer: 4 +index_buffer: 5 +index_buffer: 5 +index_buffer: 4 +index_buffer: 6 +index_buffer: 5 +index_buffer: 6 +index_buffer: 7 +index_buffer: 7 +index_buffer: 6 +index_buffer: 8 +index_buffer: 7 +index_buffer: 8 +index_buffer: 9 +index_buffer: 1 +index_buffer: 10 +index_buffer: 11 +index_buffer: 1 +index_buffer: 11 +index_buffer: 2 +index_buffer: 12 +index_buffer: 0 +index_buffer: 3 +index_buffer: 12 +index_buffer: 3 +index_buffer: 13 +index_buffer: 14 +index_buffer: 15 +index_buffer: 16 +index_buffer: 14 +index_buffer: 16 +index_buffer: 17 +index_buffer: 17 +index_buffer: 16 +index_buffer: 18 +index_buffer: 17 +index_buffer: 18 +index_buffer: 19 +index_buffer: 19 +index_buffer: 18 +index_buffer: 20 +index_buffer: 19 +index_buffer: 20 +index_buffer: 21 +index_buffer: 21 +index_buffer: 20 +index_buffer: 22 +index_buffer: 21 +index_buffer: 22 +index_buffer: 23 +index_buffer: 15 +index_buffer: 24 +index_buffer: 25 +index_buffer: 15 +index_buffer: 25 +index_buffer: 16 +index_buffer: 26 +index_buffer: 14 +index_buffer: 17 +index_buffer: 26 +index_buffer: 17 +index_buffer: 27 +index_buffer: 28 +index_buffer: 29 +index_buffer: 30 +index_buffer: 28 +index_buffer: 30 +index_buffer: 31 +index_buffer: 31 +index_buffer: 30 +index_buffer: 32 +index_buffer: 31 
+index_buffer: 32 +index_buffer: 33 +index_buffer: 33 +index_buffer: 32 +index_buffer: 34 +index_buffer: 33 +index_buffer: 34 +index_buffer: 35 +index_buffer: 35 +index_buffer: 34 +index_buffer: 36 +index_buffer: 35 +index_buffer: 36 +index_buffer: 37 +index_buffer: 29 +index_buffer: 38 +index_buffer: 39 +index_buffer: 29 +index_buffer: 39 +index_buffer: 30 +index_buffer: 40 +index_buffer: 28 +index_buffer: 31 +index_buffer: 40 +index_buffer: 31 +index_buffer: 41 diff --git a/mediapipe/graphs/face_effect/data/axis.pngblob b/mediapipe/graphs/face_effect/data/axis.pngblob new file mode 100644 index 0000000..3c36c78 Binary files /dev/null and b/mediapipe/graphs/face_effect/data/axis.pngblob differ diff --git a/mediapipe/graphs/face_effect/data/facepaint.pngblob b/mediapipe/graphs/face_effect/data/facepaint.pngblob new file mode 100644 index 0000000..7658149 Binary files /dev/null and b/mediapipe/graphs/face_effect/data/facepaint.pngblob differ diff --git a/mediapipe/graphs/face_effect/data/glasses.pbtxt b/mediapipe/graphs/face_effect/data/glasses.pbtxt new file mode 100644 index 0000000..dcfef52 --- /dev/null +++ b/mediapipe/graphs/face_effect/data/glasses.pbtxt @@ -0,0 +1,27815 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +vertex_type: VERTEX_PT +primitive_type: TRIANGLE +vertex_buffer: 6.466000 +vertex_buffer: 3.606349 +vertex_buffer: 5.547389 +vertex_buffer: 0.456900 +vertex_buffer: 0.660100 +vertex_buffer: 6.568000 +vertex_buffer: 3.419411 +vertex_buffer: 5.525260 +vertex_buffer: 0.475100 +vertex_buffer: 0.668900 +vertex_buffer: 7.041000 +vertex_buffer: 3.393790 +vertex_buffer: 5.444396 +vertex_buffer: 0.485100 +vertex_buffer: 0.711200 +vertex_buffer: 7.047000 +vertex_buffer: 3.546470 +vertex_buffer: 5.447816 +vertex_buffer: 0.471100 +vertex_buffer: 0.711700 +vertex_buffer: 6.632000 +vertex_buffer: 3.918372 +vertex_buffer: 5.528612 +vertex_buffer: 0.431000 +vertex_buffer: 0.678400 +vertex_buffer: -7.047000 +vertex_buffer: 3.546470 +vertex_buffer: 5.447816 +vertex_buffer: 0.471100 +vertex_buffer: 0.711700 +vertex_buffer: -7.041000 +vertex_buffer: 3.393790 +vertex_buffer: 5.444396 +vertex_buffer: 0.485100 +vertex_buffer: 0.711200 +vertex_buffer: -6.568000 +vertex_buffer: 3.419411 +vertex_buffer: 5.525260 +vertex_buffer: 0.475100 +vertex_buffer: 0.668900 +vertex_buffer: -6.466000 +vertex_buffer: 3.606349 +vertex_buffer: 5.547389 +vertex_buffer: 0.456900 +vertex_buffer: 0.660100 +vertex_buffer: -6.632000 +vertex_buffer: 3.918372 +vertex_buffer: 5.528612 +vertex_buffer: 0.431000 +vertex_buffer: 0.678400 +vertex_buffer: 5.129000 +vertex_buffer: 4.348364 +vertex_buffer: 5.535068 +vertex_buffer: 0.359600 +vertex_buffer: 0.551300 +vertex_buffer: 5.128000 +vertex_buffer: 4.330357 +vertex_buffer: 5.541740 +vertex_buffer: 0.361200 +vertex_buffer: 0.550600 +vertex_buffer: 5.567000 +vertex_buffer: 4.268441 +vertex_buffer: 5.472582 +vertex_buffer: 0.370600 +vertex_buffer: 0.590500 +vertex_buffer: 5.571000 +vertex_buffer: 4.286449 +vertex_buffer: 5.464946 +vertex_buffer: 0.369000 +vertex_buffer: 0.591400 +vertex_buffer: 5.118000 +vertex_buffer: 4.243356 +vertex_buffer: 5.541220 +vertex_buffer: 0.368700 +vertex_buffer: 0.548600 +vertex_buffer: 5.541000 +vertex_buffer: 4.185438 +vertex_buffer: 5.475054 +vertex_buffer: 0.377700 +vertex_buffer: 0.587200 +vertex_buffer: 4.708000 +vertex_buffer: 4.384293 +vertex_buffer: 5.593466 +vertex_buffer: 0.352000 +vertex_buffer: 0.513000 +vertex_buffer: 4.708000 +vertex_buffer: 4.367285 +vertex_buffer: 5.600235 +vertex_buffer: 0.353600 +vertex_buffer: 0.512400 +vertex_buffer: 4.253000 +vertex_buffer: 4.405225 +vertex_buffer: 5.649831 +vertex_buffer: 0.345200 +vertex_buffer: 0.471700 +vertex_buffer: 4.253000 +vertex_buffer: 4.387217 +vertex_buffer: 5.656703 +vertex_buffer: 0.346800 +vertex_buffer: 0.471000 +vertex_buffer: 4.251000 +vertex_buffer: 4.301218 +vertex_buffer: 5.656061 +vertex_buffer: 0.354400 +vertex_buffer: 0.469500 +vertex_buffer: 4.704000 +vertex_buffer: 4.280284 +vertex_buffer: 5.600062 +vertex_buffer: 0.361200 +vertex_buffer: 0.510800 +vertex_buffer: 3.712000 +vertex_buffer: 4.403163 +vertex_buffer: 5.701223 +vertex_buffer: 0.338700 +vertex_buffer: 0.422600 +vertex_buffer: 3.713000 +vertex_buffer: 4.386153 +vertex_buffer: 5.709153 +vertex_buffer: 0.340400 +vertex_buffer: 0.422100 +vertex_buffer: 3.035000 +vertex_buffer: 4.374107 +vertex_buffer: 5.746506 +vertex_buffer: 0.332700 +vertex_buffer: 0.361400 +vertex_buffer: 3.035000 +vertex_buffer: 4.357099 +vertex_buffer: 5.754267 +vertex_buffer: 0.334300 +vertex_buffer: 0.360700 +vertex_buffer: 3.034000 +vertex_buffer: 4.271099 +vertex_buffer: 5.753122 +vertex_buffer: 0.341900 +vertex_buffer: 0.358900 +vertex_buffer: 3.711000 +vertex_buffer: 4.300154 +vertex_buffer: 5.708760 +vertex_buffer: 0.348000 +vertex_buffer: 
0.420400 +vertex_buffer: 2.359000 +vertex_buffer: 4.303060 +vertex_buffer: 5.785590 +vertex_buffer: 0.329900 +vertex_buffer: 0.299400 +vertex_buffer: 2.358000 +vertex_buffer: 4.321068 +vertex_buffer: 5.778964 +vertex_buffer: 0.328100 +vertex_buffer: 0.300100 +vertex_buffer: 1.820000 +vertex_buffer: 4.245048 +vertex_buffer: 5.795635 +vertex_buffer: 0.324600 +vertex_buffer: 0.251100 +vertex_buffer: 1.823000 +vertex_buffer: 4.228040 +vertex_buffer: 5.802235 +vertex_buffer: 0.326400 +vertex_buffer: 0.250600 +vertex_buffer: 1.836000 +vertex_buffer: 4.142041 +vertex_buffer: 5.800324 +vertex_buffer: 0.334400 +vertex_buffer: 0.249400 +vertex_buffer: 2.362000 +vertex_buffer: 4.217062 +vertex_buffer: 5.783886 +vertex_buffer: 0.337600 +vertex_buffer: 0.297700 +vertex_buffer: 1.410000 +vertex_buffer: 4.152036 +vertex_buffer: 5.806158 +vertex_buffer: 0.322400 +vertex_buffer: 0.212900 +vertex_buffer: 1.405000 +vertex_buffer: 4.170044 +vertex_buffer: 5.799617 +vertex_buffer: 0.320500 +vertex_buffer: 0.213200 +vertex_buffer: 5.551000 +vertex_buffer: 4.106152 +vertex_buffer: 5.709693 +vertex_buffer: 0.402100 +vertex_buffer: 0.579100 +vertex_buffer: 5.486000 +vertex_buffer: 3.986141 +vertex_buffer: 5.718381 +vertex_buffer: 0.412000 +vertex_buffer: 0.572000 +vertex_buffer: 5.819000 +vertex_buffer: 3.920209 +vertex_buffer: 5.663535 +vertex_buffer: 0.421100 +vertex_buffer: 0.602200 +vertex_buffer: 6.010000 +vertex_buffer: 4.027245 +vertex_buffer: 5.632739 +vertex_buffer: 0.413400 +vertex_buffer: 0.620600 +vertex_buffer: 6.101000 +vertex_buffer: 3.839266 +vertex_buffer: 5.615214 +vertex_buffer: 0.431300 +vertex_buffer: 0.627700 +vertex_buffer: 6.386000 +vertex_buffer: 3.956323 +vertex_buffer: 5.569576 +vertex_buffer: 0.423500 +vertex_buffer: 0.654600 +vertex_buffer: 6.428000 +vertex_buffer: 3.592345 +vertex_buffer: 5.550429 +vertex_buffer: 0.457600 +vertex_buffer: 0.656300 +vertex_buffer: 6.533000 +vertex_buffer: 3.412464 +vertex_buffer: 5.527380 +vertex_buffer: 0.474800 +vertex_buffer: 0.665700 +vertex_buffer: 6.595000 +vertex_buffer: 3.198286 +vertex_buffer: 5.512281 +vertex_buffer: 0.494800 +vertex_buffer: 0.670900 +vertex_buffer: 6.627000 +vertex_buffer: 3.198302 +vertex_buffer: 5.510076 +vertex_buffer: 0.495800 +vertex_buffer: 0.673400 +vertex_buffer: 6.619000 +vertex_buffer: 2.959405 +vertex_buffer: 5.501948 +vertex_buffer: 0.516700 +vertex_buffer: 0.670900 +vertex_buffer: 6.652000 +vertex_buffer: 2.957454 +vertex_buffer: 5.499708 +vertex_buffer: 0.517700 +vertex_buffer: 0.673200 +vertex_buffer: 6.653000 +vertex_buffer: 2.716597 +vertex_buffer: 5.493766 +vertex_buffer: 0.538700 +vertex_buffer: 0.668700 +vertex_buffer: 6.617000 +vertex_buffer: 2.717522 +vertex_buffer: 5.497011 +vertex_buffer: 0.537900 +vertex_buffer: 0.666200 +vertex_buffer: 6.539000 +vertex_buffer: 2.186655 +vertex_buffer: 5.498873 +vertex_buffer: 0.579700 +vertex_buffer: 0.642800 +vertex_buffer: 6.383000 +vertex_buffer: 1.687333 +vertex_buffer: 5.513467 +vertex_buffer: 0.614200 +vertex_buffer: 0.610700 +vertex_buffer: 6.420000 +vertex_buffer: 1.682484 +vertex_buffer: 5.510983 +vertex_buffer: 0.615100 +vertex_buffer: 0.613800 +vertex_buffer: 6.578000 +vertex_buffer: 2.182802 +vertex_buffer: 5.495497 +vertex_buffer: 0.580600 +vertex_buffer: 0.645900 +vertex_buffer: 6.180000 +vertex_buffer: 1.161807 +vertex_buffer: 5.531222 +vertex_buffer: 0.648100 +vertex_buffer: 0.572700 +vertex_buffer: 6.214000 +vertex_buffer: 1.153944 +vertex_buffer: 5.529571 +vertex_buffer: 0.649300 +vertex_buffer: 0.575600 +vertex_buffer: 5.976000 +vertex_buffer: 
0.774659 +vertex_buffer: 5.548143 +vertex_buffer: 0.672000 +vertex_buffer: 0.540800 +vertex_buffer: 6.006000 +vertex_buffer: 0.759837 +vertex_buffer: 5.546113 +vertex_buffer: 0.674000 +vertex_buffer: 0.543200 +vertex_buffer: 5.738000 +vertex_buffer: 0.483219 +vertex_buffer: 5.569363 +vertex_buffer: 0.689400 +vertex_buffer: 0.510800 +vertex_buffer: 5.765000 +vertex_buffer: 0.459438 +vertex_buffer: 5.566656 +vertex_buffer: 0.692000 +vertex_buffer: 0.512700 +vertex_buffer: 5.435000 +vertex_buffer: 0.241572 +vertex_buffer: 5.597222 +vertex_buffer: 0.703400 +vertex_buffer: 0.477900 +vertex_buffer: 5.459000 +vertex_buffer: 0.213714 +vertex_buffer: 5.594969 +vertex_buffer: 0.706300 +vertex_buffer: 0.479400 +vertex_buffer: 5.043000 +vertex_buffer: 0.047655 +vertex_buffer: 5.633954 +vertex_buffer: 0.714700 +vertex_buffer: 0.439400 +vertex_buffer: 5.060000 +vertex_buffer: 0.015697 +vertex_buffer: 5.632053 +vertex_buffer: 0.717800 +vertex_buffer: 0.440200 +vertex_buffer: 4.534000 +vertex_buffer: -0.103409 +vertex_buffer: 5.679254 +vertex_buffer: 0.723700 +vertex_buffer: 0.391800 +vertex_buffer: 4.543000 +vertex_buffer: -0.140581 +vertex_buffer: 5.678630 +vertex_buffer: 0.726900 +vertex_buffer: 0.391800 +vertex_buffer: 4.005000 +vertex_buffer: -0.195870 +vertex_buffer: 5.728422 +vertex_buffer: 0.728400 +vertex_buffer: 0.343300 +vertex_buffer: 4.007000 +vertex_buffer: -0.236085 +vertex_buffer: 5.727403 +vertex_buffer: 0.731900 +vertex_buffer: 0.342800 +vertex_buffer: 1.098000 +vertex_buffer: 4.116043 +vertex_buffer: 5.800600 +vertex_buffer: 0.315600 +vertex_buffer: 0.185500 +vertex_buffer: 1.103000 +vertex_buffer: 4.098034 +vertex_buffer: 5.808094 +vertex_buffer: 0.317600 +vertex_buffer: 0.185200 +vertex_buffer: 1.126000 +vertex_buffer: 4.010036 +vertex_buffer: 5.805807 +vertex_buffer: 0.325900 +vertex_buffer: 0.184200 +vertex_buffer: 1.432000 +vertex_buffer: 4.066038 +vertex_buffer: 5.804105 +vertex_buffer: 0.330600 +vertex_buffer: 0.212100 +vertex_buffer: 0.883999 +vertex_buffer: 4.081041 +vertex_buffer: 5.802060 +vertex_buffer: 0.311200 +vertex_buffer: 0.166300 +vertex_buffer: 0.887998 +vertex_buffer: 4.062030 +vertex_buffer: 5.809504 +vertex_buffer: 0.313200 +vertex_buffer: 0.166000 +vertex_buffer: 0.162758 +vertex_buffer: 3.804688 +vertex_buffer: 6.122684 +vertex_buffer: 0.333600 +vertex_buffer: 0.088700 +vertex_buffer: -0.000000 +vertex_buffer: 3.803644 +vertex_buffer: 6.128218 +vertex_buffer: 0.327400 +vertex_buffer: 0.075700 +vertex_buffer: -0.000000 +vertex_buffer: 3.547017 +vertex_buffer: 6.117342 +vertex_buffer: 0.347200 +vertex_buffer: 0.065200 +vertex_buffer: 0.165816 +vertex_buffer: 3.544578 +vertex_buffer: 6.111334 +vertex_buffer: 0.354600 +vertex_buffer: 0.078000 +vertex_buffer: 0.313999 +vertex_buffer: 3.805656 +vertex_buffer: 6.117985 +vertex_buffer: 0.339400 +vertex_buffer: 0.100800 +vertex_buffer: 0.319898 +vertex_buffer: 3.542312 +vertex_buffer: 6.106232 +vertex_buffer: 0.361400 +vertex_buffer: 0.090000 +vertex_buffer: 0.584504 +vertex_buffer: 3.526286 +vertex_buffer: 6.088815 +vertex_buffer: 0.371900 +vertex_buffer: 0.112100 +vertex_buffer: 0.582000 +vertex_buffer: 3.814673 +vertex_buffer: 6.103284 +vertex_buffer: 0.347300 +vertex_buffer: 0.124400 +vertex_buffer: 0.432000 +vertex_buffer: 3.031033 +vertex_buffer: 6.071945 +vertex_buffer: 0.411000 +vertex_buffer: 0.070300 +vertex_buffer: 0.522999 +vertex_buffer: 2.920398 +vertex_buffer: 6.062136 +vertex_buffer: 0.424300 +vertex_buffer: 0.072800 +vertex_buffer: 0.547999 +vertex_buffer: 2.930367 +vertex_buffer: 6.069046 
+vertex_buffer: 0.423500 +vertex_buffer: 0.076700 +vertex_buffer: 0.452999 +vertex_buffer: 3.052091 +vertex_buffer: 6.080023 +vertex_buffer: 0.409300 +vertex_buffer: 0.074700 +vertex_buffer: 0.591999 +vertex_buffer: 2.935434 +vertex_buffer: 6.066682 +vertex_buffer: 0.423800 +vertex_buffer: 0.082000 +vertex_buffer: 0.500000 +vertex_buffer: 3.112521 +vertex_buffer: 6.080370 +vertex_buffer: 0.405500 +vertex_buffer: 0.082900 +vertex_buffer: 0.625000 +vertex_buffer: 2.745313 +vertex_buffer: 6.055617 +vertex_buffer: 0.442100 +vertex_buffer: 0.076800 +vertex_buffer: 0.664999 +vertex_buffer: 2.741312 +vertex_buffer: 6.054260 +vertex_buffer: 0.442900 +vertex_buffer: 0.080800 +vertex_buffer: 0.600999 +vertex_buffer: 2.743412 +vertex_buffer: 6.049767 +vertex_buffer: 0.442400 +vertex_buffer: 0.073800 +vertex_buffer: 0.872000 +vertex_buffer: 1.935933 +vertex_buffer: 6.014896 +vertex_buffer: 0.519600 +vertex_buffer: 0.080100 +vertex_buffer: 0.889000 +vertex_buffer: 1.937704 +vertex_buffer: 6.020942 +vertex_buffer: 0.519200 +vertex_buffer: 0.081200 +vertex_buffer: 0.797000 +vertex_buffer: 2.211378 +vertex_buffer: 6.028312 +vertex_buffer: 0.493200 +vertex_buffer: 0.078100 +vertex_buffer: 0.777999 +vertex_buffer: 2.209563 +vertex_buffer: 6.022314 +vertex_buffer: 0.493700 +vertex_buffer: 0.076500 +vertex_buffer: 0.915999 +vertex_buffer: 1.941734 +vertex_buffer: 6.021025 +vertex_buffer: 0.519000 +vertex_buffer: 0.083200 +vertex_buffer: 0.825999 +vertex_buffer: 2.213393 +vertex_buffer: 6.028286 +vertex_buffer: 0.493200 +vertex_buffer: 0.080600 +vertex_buffer: 0.994000 +vertex_buffer: 1.653878 +vertex_buffer: 6.013682 +vertex_buffer: 0.546000 +vertex_buffer: 0.086400 +vertex_buffer: 1.019000 +vertex_buffer: 1.656937 +vertex_buffer: 6.012816 +vertex_buffer: 0.546000 +vertex_buffer: 0.088000 +vertex_buffer: 0.978000 +vertex_buffer: 1.652161 +vertex_buffer: 6.007595 +vertex_buffer: 0.546300 +vertex_buffer: 0.085500 +vertex_buffer: 1.470000 +vertex_buffer: 0.609648 +vertex_buffer: 5.964523 +vertex_buffer: 0.642600 +vertex_buffer: 0.119900 +vertex_buffer: 1.273000 +vertex_buffer: 0.991726 +vertex_buffer: 5.987666 +vertex_buffer: 0.607500 +vertex_buffer: 0.104800 +vertex_buffer: 1.259000 +vertex_buffer: 0.989106 +vertex_buffer: 5.981474 +vertex_buffer: 0.607700 +vertex_buffer: 0.104000 +vertex_buffer: 1.457000 +vertex_buffer: 0.605091 +vertex_buffer: 5.958188 +vertex_buffer: 0.642900 +vertex_buffer: 0.119100 +vertex_buffer: 1.490000 +vertex_buffer: 0.618772 +vertex_buffer: 5.964130 +vertex_buffer: 0.642200 +vertex_buffer: 0.121200 +vertex_buffer: 1.296000 +vertex_buffer: 0.995807 +vertex_buffer: 5.986959 +vertex_buffer: 0.607500 +vertex_buffer: 0.106300 +vertex_buffer: 1.726000 +vertex_buffer: 0.268501 +vertex_buffer: 5.934266 +vertex_buffer: 0.674500 +vertex_buffer: 0.140600 +vertex_buffer: 1.708000 +vertex_buffer: 0.254355 +vertex_buffer: 5.934185 +vertex_buffer: 0.675200 +vertex_buffer: 0.139400 +vertex_buffer: 1.697000 +vertex_buffer: 0.246832 +vertex_buffer: 5.927583 +vertex_buffer: 0.675700 +vertex_buffer: 0.138700 +vertex_buffer: 1.990000 +vertex_buffer: -0.014744 +vertex_buffer: 5.901462 +vertex_buffer: 0.700200 +vertex_buffer: 0.163400 +vertex_buffer: 1.981000 +vertex_buffer: -0.024212 +vertex_buffer: 5.894596 +vertex_buffer: 0.700700 +vertex_buffer: 0.162700 +vertex_buffer: 2.005000 +vertex_buffer: 0.001400 +vertex_buffer: 5.901891 +vertex_buffer: 0.699300 +vertex_buffer: 0.164500 +vertex_buffer: 2.328000 +vertex_buffer: -0.193438 +vertex_buffer: 5.866342 +vertex_buffer: 0.718200 +vertex_buffer: 0.192400 
+vertex_buffer: 2.316000
+vertex_buffer: -0.211562
+vertex_buffer: 5.865541
+vertex_buffer: 0.719200
+vertex_buffer: 0.191400
[… several hundred further vertex_buffer entries elided. The added data file continues in the same pattern: five floats per vertex, which appear to be an x/y/z position followed by u/v texture coordinates for the face-mesh model geometry …]
+vertex_buffer: 5.831179 +vertex_buffer: 0.307300 +vertex_buffer: 0.112600 +vertex_buffer: 7.258000 +vertex_buffer: 3.938548 +vertex_buffer: 5.383944 +vertex_buffer: 0.440600 +vertex_buffer: 0.733000 +vertex_buffer: 7.159000 +vertex_buffer: 3.963499 +vertex_buffer: 5.424780 +vertex_buffer: 0.436600 +vertex_buffer: 0.724900 +vertex_buffer: 0.311000 +vertex_buffer: 3.873654 +vertex_buffer: 6.119599 +vertex_buffer: 0.333600 +vertex_buffer: 0.103500 +vertex_buffer: 0.311000 +vertex_buffer: 3.892664 +vertex_buffer: 6.112020 +vertex_buffer: 0.331700 +vertex_buffer: 0.104400 +vertex_buffer: 7.393000 +vertex_buffer: 3.904894 +vertex_buffer: 5.098479 +vertex_buffer: 0.439300 +vertex_buffer: 0.759200 +vertex_buffer: 7.394000 +vertex_buffer: 3.902982 +vertex_buffer: 5.026949 +vertex_buffer: 0.438100 +vertex_buffer: 0.765000 +vertex_buffer: 7.174000 +vertex_buffer: 3.952909 +vertex_buffer: 5.086542 +vertex_buffer: 0.422100 +vertex_buffer: 0.749800 +vertex_buffer: 7.241000 +vertex_buffer: 3.274391 +vertex_buffer: 5.375715 +vertex_buffer: 0.499600 +vertex_buffer: 0.731200 +vertex_buffer: 7.242000 +vertex_buffer: 3.240843 +vertex_buffer: 5.369015 +vertex_buffer: 0.503300 +vertex_buffer: 0.731900 +vertex_buffer: 7.159000 +vertex_buffer: 3.201010 +vertex_buffer: 5.412225 +vertex_buffer: 0.506400 +vertex_buffer: 0.722900 +vertex_buffer: 7.166000 +vertex_buffer: 3.173387 +vertex_buffer: 5.403695 +vertex_buffer: 0.510000 +vertex_buffer: 0.724400 +vertex_buffer: 7.108000 +vertex_buffer: 3.090089 +vertex_buffer: 5.426351 +vertex_buffer: 0.516200 +vertex_buffer: 0.716700 +vertex_buffer: 7.124000 +vertex_buffer: 3.079314 +vertex_buffer: 5.416255 +vertex_buffer: 0.518200 +vertex_buffer: 0.718900 +vertex_buffer: 6.719000 +vertex_buffer: 1.643322 +vertex_buffer: 5.459179 +vertex_buffer: 0.628100 +vertex_buffer: 0.637500 +vertex_buffer: 6.510000 +vertex_buffer: 1.089627 +vertex_buffer: 5.476794 +vertex_buffer: 0.664600 +vertex_buffer: 0.598500 +vertex_buffer: 6.271000 +vertex_buffer: 0.633869 +vertex_buffer: 5.496512 +vertex_buffer: 0.692600 +vertex_buffer: 0.562000 +vertex_buffer: 5.986000 +vertex_buffer: 0.269421 +vertex_buffer: 5.519618 +vertex_buffer: 0.713900 +vertex_buffer: 0.526900 +vertex_buffer: 5.641000 +vertex_buffer: -0.012627 +vertex_buffer: 5.549834 +vertex_buffer: 0.729600 +vertex_buffer: 0.490600 +vertex_buffer: 5.197000 +vertex_buffer: -0.225226 +vertex_buffer: 5.589799 +vertex_buffer: 0.740900 +vertex_buffer: 0.448400 +vertex_buffer: 4.616000 +vertex_buffer: -0.376667 +vertex_buffer: 5.644580 +vertex_buffer: 0.748600 +vertex_buffer: 0.395400 +vertex_buffer: 7.151000 +vertex_buffer: 3.714496 +vertex_buffer: 5.425767 +vertex_buffer: 0.457900 +vertex_buffer: 0.722400 +vertex_buffer: 7.055000 +vertex_buffer: 3.734465 +vertex_buffer: 5.451614 +vertex_buffer: 0.454700 +vertex_buffer: 0.713600 +vertex_buffer: 7.062000 +vertex_buffer: 3.915460 +vertex_buffer: 5.455733 +vertex_buffer: 0.439000 +vertex_buffer: 0.716100 +vertex_buffer: 0.708000 +vertex_buffer: 2.492911 +vertex_buffer: 6.041574 +vertex_buffer: 0.466400 +vertex_buffer: 0.076800 +vertex_buffer: 0.741999 +vertex_buffer: 2.489950 +vertex_buffer: 6.039368 +vertex_buffer: 0.466900 +vertex_buffer: 0.079900 +vertex_buffer: 0.686999 +vertex_buffer: 2.492061 +vertex_buffer: 6.035655 +vertex_buffer: 0.466700 +vertex_buffer: 0.074600 +vertex_buffer: 0.517000 +vertex_buffer: 2.925090 +vertex_buffer: 5.743452 +vertex_buffer: 0.434200 +vertex_buffer: 0.041600 +vertex_buffer: 0.511999 +vertex_buffer: 2.920268 +vertex_buffer: 5.799177 +vertex_buffer: 0.432500 
+vertex_buffer: 0.046600 +vertex_buffer: 1.065000 +vertex_buffer: 2.457159 +vertex_buffer: 5.730452 +vertex_buffer: 0.481200 +vertex_buffer: 0.131700 +vertex_buffer: 0.679000 +vertex_buffer: 2.492613 +vertex_buffer: 6.014641 +vertex_buffer: 0.467600 +vertex_buffer: 0.072300 +vertex_buffer: 0.679000 +vertex_buffer: 2.495623 +vertex_buffer: 5.976590 +vertex_buffer: 0.468900 +vertex_buffer: 0.068700 +vertex_buffer: 1.018000 +vertex_buffer: 2.447954 +vertex_buffer: 6.028774 +vertex_buffer: 0.476500 +vertex_buffer: 0.102800 +vertex_buffer: -0.000000 +vertex_buffer: 3.101020 +vertex_buffer: 5.817830 +vertex_buffer: 0.384400 +vertex_buffer: 0.029200 +vertex_buffer: -0.000000 +vertex_buffer: 3.110590 +vertex_buffer: 5.759189 +vertex_buffer: 0.386000 +vertex_buffer: 0.024200 +vertex_buffer: 0.301000 +vertex_buffer: 3.074498 +vertex_buffer: 6.056509 +vertex_buffer: 0.400800 +vertex_buffer: 0.062100 +vertex_buffer: 0.305000 +vertex_buffer: 3.083319 +vertex_buffer: 6.079671 +vertex_buffer: 0.399200 +vertex_buffer: 0.065200 +vertex_buffer: 0.301000 +vertex_buffer: 3.074878 +vertex_buffer: 6.020398 +vertex_buffer: 0.402500 +vertex_buffer: 0.058200 +vertex_buffer: 0.178999 +vertex_buffer: 3.134543 +vertex_buffer: 6.095752 +vertex_buffer: 0.387600 +vertex_buffer: 0.060600 +vertex_buffer: 0.170015 +vertex_buffer: 3.194879 +vertex_buffer: 6.097709 +vertex_buffer: 0.382600 +vertex_buffer: 0.063800 +vertex_buffer: -0.000000 +vertex_buffer: 3.212067 +vertex_buffer: 6.104634 +vertex_buffer: 0.373000 +vertex_buffer: 0.051600 +vertex_buffer: -0.000000 +vertex_buffer: 3.143560 +vertex_buffer: 6.101644 +vertex_buffer: 0.376400 +vertex_buffer: 0.049100 +vertex_buffer: 0.327999 +vertex_buffer: 3.178908 +vertex_buffer: 6.091790 +vertex_buffer: 0.391600 +vertex_buffer: 0.075000 +vertex_buffer: 0.317000 +vertex_buffer: 3.111432 +vertex_buffer: 6.088937 +vertex_buffer: 0.397000 +vertex_buffer: 0.068900 +vertex_buffer: 7.382000 +vertex_buffer: 3.380263 +vertex_buffer: 4.933475 +vertex_buffer: 0.490000 +vertex_buffer: 0.775600 +vertex_buffer: 7.367000 +vertex_buffer: 3.390060 +vertex_buffer: 4.917622 +vertex_buffer: 0.489800 +vertex_buffer: 0.777700 +vertex_buffer: 7.358000 +vertex_buffer: 3.360794 +vertex_buffer: 4.917903 +vertex_buffer: 0.493300 +vertex_buffer: 0.776800 +vertex_buffer: 7.373000 +vertex_buffer: 3.350981 +vertex_buffer: 4.934727 +vertex_buffer: 0.493200 +vertex_buffer: 0.774800 +vertex_buffer: 7.379000 +vertex_buffer: 3.350882 +vertex_buffer: 4.963497 +vertex_buffer: 0.493000 +vertex_buffer: 0.772300 +vertex_buffer: 7.388000 +vertex_buffer: 3.380187 +vertex_buffer: 4.962238 +vertex_buffer: 0.489700 +vertex_buffer: 0.773100 +vertex_buffer: 7.364000 +vertex_buffer: 3.323990 +vertex_buffer: 5.087562 +vertex_buffer: 0.496000 +vertex_buffer: 0.760600 +vertex_buffer: 7.321000 +vertex_buffer: 3.301386 +vertex_buffer: 5.100352 +vertex_buffer: 0.500700 +vertex_buffer: 0.758500 +vertex_buffer: 7.316000 +vertex_buffer: 3.306611 +vertex_buffer: 5.030970 +vertex_buffer: 0.501000 +vertex_buffer: 0.764900 +vertex_buffer: 7.363000 +vertex_buffer: 3.331136 +vertex_buffer: 5.016309 +vertex_buffer: 0.495800 +vertex_buffer: 0.767100 +vertex_buffer: 7.387000 +vertex_buffer: 3.350724 +vertex_buffer: 5.009150 +vertex_buffer: 0.492600 +vertex_buffer: 0.768300 +vertex_buffer: 7.354000 +vertex_buffer: 3.331322 +vertex_buffer: 4.969654 +vertex_buffer: 0.496400 +vertex_buffer: 0.771300 +vertex_buffer: 7.363000 +vertex_buffer: 3.890097 +vertex_buffer: 4.932051 +vertex_buffer: 0.436100 +vertex_buffer: 0.773200 +vertex_buffer: 7.381000 
+vertex_buffer: 3.903078 +vertex_buffer: 4.948469 +vertex_buffer: 0.437000 +vertex_buffer: 0.771400 +vertex_buffer: 7.386000 +vertex_buffer: 3.903040 +vertex_buffer: 4.979247 +vertex_buffer: 0.437300 +vertex_buffer: 0.768800 +vertex_buffer: 7.171000 +vertex_buffer: 3.951033 +vertex_buffer: 4.984264 +vertex_buffer: 0.421000 +vertex_buffer: 0.759500 +vertex_buffer: 7.174000 +vertex_buffer: 3.951991 +vertex_buffer: 5.020010 +vertex_buffer: 0.421400 +vertex_buffer: 0.756000 +vertex_buffer: 7.371000 +vertex_buffer: 3.911768 +vertex_buffer: 5.201848 +vertex_buffer: 0.440800 +vertex_buffer: 0.750700 +vertex_buffer: 7.153000 +vertex_buffer: 3.968528 +vertex_buffer: 5.400862 +vertex_buffer: 0.435000 +vertex_buffer: 0.725700 +vertex_buffer: 7.066000 +vertex_buffer: 3.987499 +vertex_buffer: 5.424838 +vertex_buffer: 0.431700 +vertex_buffer: 0.718800 +vertex_buffer: 1.094000 +vertex_buffer: 1.348785 +vertex_buffer: 5.935443 +vertex_buffer: 0.576900 +vertex_buffer: 0.089300 +vertex_buffer: 7.126000 +vertex_buffer: 3.076574 +vertex_buffer: 5.395181 +vertex_buffer: 0.519800 +vertex_buffer: 0.721200 +vertex_buffer: 7.164000 +vertex_buffer: 3.162683 +vertex_buffer: 5.383419 +vertex_buffer: 0.512300 +vertex_buffer: 0.726400 +vertex_buffer: 7.097000 +vertex_buffer: 3.186858 +vertex_buffer: 5.076458 +vertex_buffer: 0.524600 +vertex_buffer: 0.756800 +vertex_buffer: 7.036000 +vertex_buffer: 3.179970 +vertex_buffer: 5.079792 +vertex_buffer: 0.530200 +vertex_buffer: 0.757700 +vertex_buffer: 7.026000 +vertex_buffer: 3.181370 +vertex_buffer: 5.028232 +vertex_buffer: 0.530100 +vertex_buffer: 0.762800 +vertex_buffer: 7.087000 +vertex_buffer: 3.187309 +vertex_buffer: 5.019910 +vertex_buffer: 0.524600 +vertex_buffer: 0.762800 +vertex_buffer: 7.040000 +vertex_buffer: 3.155063 +vertex_buffer: 5.125915 +vertex_buffer: 0.531800 +vertex_buffer: 0.752500 +vertex_buffer: 7.415000 +vertex_buffer: 3.820997 +vertex_buffer: 5.015137 +vertex_buffer: 0.447600 +vertex_buffer: 0.768900 +vertex_buffer: 7.409000 +vertex_buffer: 3.687998 +vertex_buffer: 5.012717 +vertex_buffer: 0.460000 +vertex_buffer: 0.770000 +vertex_buffer: 7.401000 +vertex_buffer: 3.688052 +vertex_buffer: 4.968018 +vertex_buffer: 0.459500 +vertex_buffer: 0.774100 +vertex_buffer: 7.417000 +vertex_buffer: 3.867994 +vertex_buffer: 5.016397 +vertex_buffer: 0.443000 +vertex_buffer: 0.767800 +vertex_buffer: 7.412000 +vertex_buffer: 3.890991 +vertex_buffer: 5.019919 +vertex_buffer: 0.440500 +vertex_buffer: 0.766700 +vertex_buffer: 7.404000 +vertex_buffer: 3.567002 +vertex_buffer: 5.009855 +vertex_buffer: 0.471300 +vertex_buffer: 0.770300 +vertex_buffer: 7.400000 +vertex_buffer: 3.476002 +vertex_buffer: 5.008861 +vertex_buffer: 0.479900 +vertex_buffer: 0.770000 +vertex_buffer: 7.161000 +vertex_buffer: 3.216266 +vertex_buffer: 5.074730 +vertex_buffer: 0.517800 +vertex_buffer: 0.757500 +vertex_buffer: 7.238000 +vertex_buffer: 3.261456 +vertex_buffer: 5.054759 +vertex_buffer: 0.509400 +vertex_buffer: 0.761000 +vertex_buffer: 7.251000 +vertex_buffer: 3.261063 +vertex_buffer: 5.121390 +vertex_buffer: 0.508300 +vertex_buffer: 0.754900 +vertex_buffer: 7.398000 +vertex_buffer: 3.421141 +vertex_buffer: 5.007745 +vertex_buffer: 0.485200 +vertex_buffer: 0.769600 +vertex_buffer: 7.396000 +vertex_buffer: 3.380068 +vertex_buffer: 5.006904 +vertex_buffer: 0.489400 +vertex_buffer: 0.769100 +vertex_buffer: 7.030000 +vertex_buffer: 3.190637 +vertex_buffer: 4.970094 +vertex_buffer: 0.528700 +vertex_buffer: 0.768800 +vertex_buffer: 7.069000 +vertex_buffer: 3.189697 +vertex_buffer: 4.965413 
+vertex_buffer: 0.525300 +vertex_buffer: 0.768200 +vertex_buffer: 7.080000 +vertex_buffer: 3.187593 +vertex_buffer: 4.984242 +vertex_buffer: 0.524700 +vertex_buffer: 0.766300 +vertex_buffer: 7.020000 +vertex_buffer: 3.180675 +vertex_buffer: 4.993543 +vertex_buffer: 0.530100 +vertex_buffer: 0.766400 +vertex_buffer: 7.349000 +vertex_buffer: 3.331445 +vertex_buffer: 4.938923 +vertex_buffer: 0.496500 +vertex_buffer: 0.774100 +vertex_buffer: 7.333000 +vertex_buffer: 3.341269 +vertex_buffer: 4.922087 +vertex_buffer: 0.497000 +vertex_buffer: 0.776200 +vertex_buffer: 7.274000 +vertex_buffer: 3.257252 +vertex_buffer: 5.266911 +vertex_buffer: 0.504300 +vertex_buffer: 0.742300 +vertex_buffer: 7.312000 +vertex_buffer: 3.287216 +vertex_buffer: 5.186389 +vertex_buffer: 0.501400 +vertex_buffer: 0.750500 +vertex_buffer: 7.345000 +vertex_buffer: 3.303906 +vertex_buffer: 5.190210 +vertex_buffer: 0.497700 +vertex_buffer: 0.751100 +vertex_buffer: 7.303000 +vertex_buffer: 3.270918 +vertex_buffer: 5.289551 +vertex_buffer: 0.500700 +vertex_buffer: 0.741200 +vertex_buffer: 7.233000 +vertex_buffer: 3.226150 +vertex_buffer: 5.350587 +vertex_buffer: 0.505800 +vertex_buffer: 0.733100 +vertex_buffer: 7.214000 +vertex_buffer: 3.218464 +vertex_buffer: 5.320205 +vertex_buffer: 0.508900 +vertex_buffer: 0.735300 +vertex_buffer: 7.120000 +vertex_buffer: 3.964835 +vertex_buffer: 5.147107 +vertex_buffer: 0.417800 +vertex_buffer: 0.740900 +vertex_buffer: 7.097000 +vertex_buffer: 3.954853 +vertex_buffer: 5.132733 +vertex_buffer: 0.414800 +vertex_buffer: 0.741000 +vertex_buffer: 5.117000 +vertex_buffer: 4.224341 +vertex_buffer: 5.554891 +vertex_buffer: 0.370700 +vertex_buffer: 0.547800 +vertex_buffer: 4.704000 +vertex_buffer: 4.261270 +vertex_buffer: 5.612811 +vertex_buffer: 0.363200 +vertex_buffer: 0.510100 +vertex_buffer: 4.252000 +vertex_buffer: 4.282202 +vertex_buffer: 5.668910 +vertex_buffer: 0.356400 +vertex_buffer: 0.468800 +vertex_buffer: 3.712000 +vertex_buffer: 4.282137 +vertex_buffer: 5.721675 +vertex_buffer: 0.349900 +vertex_buffer: 0.419600 +vertex_buffer: 3.035000 +vertex_buffer: 4.252083 +vertex_buffer: 5.765869 +vertex_buffer: 0.344000 +vertex_buffer: 0.358000 +vertex_buffer: 2.363000 +vertex_buffer: 4.199045 +vertex_buffer: 5.797508 +vertex_buffer: 0.339700 +vertex_buffer: 0.296700 +vertex_buffer: 1.839000 +vertex_buffer: 4.123028 +vertex_buffer: 5.812879 +vertex_buffer: 0.336700 +vertex_buffer: 0.248500 +vertex_buffer: 1.437000 +vertex_buffer: 4.047021 +vertex_buffer: 5.817625 +vertex_buffer: 0.333000 +vertex_buffer: 0.211500 +vertex_buffer: 1.131000 +vertex_buffer: 3.991018 +vertex_buffer: 5.820283 +vertex_buffer: 0.328400 +vertex_buffer: 0.183700 +vertex_buffer: 7.379000 +vertex_buffer: 3.678107 +vertex_buffer: 4.922937 +vertex_buffer: 0.459600 +vertex_buffer: 0.778900 +vertex_buffer: 7.396000 +vertex_buffer: 3.688087 +vertex_buffer: 4.939241 +vertex_buffer: 0.459100 +vertex_buffer: 0.776700 +vertex_buffer: 6.544000 +vertex_buffer: 3.988624 +vertex_buffer: 5.320908 +vertex_buffer: 0.403700 +vertex_buffer: 0.677300 +vertex_buffer: 6.569000 +vertex_buffer: 3.989616 +vertex_buffer: 5.326786 +vertex_buffer: 0.404700 +vertex_buffer: 0.679400 +vertex_buffer: 4.686000 +vertex_buffer: 4.355355 +vertex_buffer: 5.543365 +vertex_buffer: 0.345100 +vertex_buffer: 0.513400 +vertex_buffer: 5.108000 +vertex_buffer: 4.317427 +vertex_buffer: 5.483809 +vertex_buffer: 0.352600 +vertex_buffer: 0.552200 +vertex_buffer: 5.554000 +vertex_buffer: 4.253511 +vertex_buffer: 5.414423 +vertex_buffer: 0.362300 +vertex_buffer: 0.593200 
+vertex_buffer: 6.072000 +vertex_buffer: 4.154618 +vertex_buffer: 5.326802 +vertex_buffer: 0.375300 +vertex_buffer: 0.640700 +vertex_buffer: 6.496000 +vertex_buffer: 4.064704 +vertex_buffer: 5.255655 +vertex_buffer: 0.387600 +vertex_buffer: 0.679600 +vertex_buffer: 6.693000 +vertex_buffer: 4.022743 +vertex_buffer: 5.223533 +vertex_buffer: 0.395400 +vertex_buffer: 0.699800 +vertex_buffer: 6.718000 +vertex_buffer: 4.016747 +vertex_buffer: 5.219308 +vertex_buffer: 0.396900 +vertex_buffer: 0.703600 +vertex_buffer: 6.765000 +vertex_buffer: 4.006758 +vertex_buffer: 5.210963 +vertex_buffer: 0.399300 +vertex_buffer: 0.709400 +vertex_buffer: 7.031000 +vertex_buffer: 3.082283 +vertex_buffer: 5.141156 +vertex_buffer: 0.536800 +vertex_buffer: 0.746500 +vertex_buffer: 6.999000 +vertex_buffer: 2.919289 +vertex_buffer: 5.142432 +vertex_buffer: 0.549900 +vertex_buffer: 0.736300 +vertex_buffer: 6.668000 +vertex_buffer: 1.672600 +vertex_buffer: 5.170115 +vertex_buffer: 0.646300 +vertex_buffer: 0.658100 +vertex_buffer: 6.830000 +vertex_buffer: 2.206663 +vertex_buffer: 5.154284 +vertex_buffer: 0.606600 +vertex_buffer: 0.693700 +vertex_buffer: 6.462000 +vertex_buffer: 1.127224 +vertex_buffer: 5.188234 +vertex_buffer: 0.684400 +vertex_buffer: 0.617400 +vertex_buffer: 6.223000 +vertex_buffer: 0.676129 +vertex_buffer: 5.207506 +vertex_buffer: 0.714300 +vertex_buffer: 0.578200 +vertex_buffer: 5.940000 +vertex_buffer: 0.312489 +vertex_buffer: 5.231991 +vertex_buffer: 0.737300 +vertex_buffer: 0.539500 +vertex_buffer: 5.597000 +vertex_buffer: 0.033719 +vertex_buffer: 5.261748 +vertex_buffer: 0.754700 +vertex_buffer: 0.499600 +vertex_buffer: 5.158000 +vertex_buffer: -0.173292 +vertex_buffer: 5.302549 +vertex_buffer: 0.767700 +vertex_buffer: 0.454100 +vertex_buffer: 4.582000 +vertex_buffer: -0.319409 +vertex_buffer: 5.355970 +vertex_buffer: 0.776700 +vertex_buffer: 0.397800 +vertex_buffer: 3.990000 +vertex_buffer: -0.400474 +vertex_buffer: 5.413808 +vertex_buffer: 0.779800 +vertex_buffer: 0.340600 +vertex_buffer: 3.501000 +vertex_buffer: -0.414360 +vertex_buffer: 5.459591 +vertex_buffer: 0.777100 +vertex_buffer: 0.293000 +vertex_buffer: 3.075000 +vertex_buffer: -0.373539 +vertex_buffer: 5.500115 +vertex_buffer: 0.769700 +vertex_buffer: 0.251500 +vertex_buffer: 2.674000 +vertex_buffer: -0.292200 +vertex_buffer: 5.537738 +vertex_buffer: 0.757200 +vertex_buffer: 0.212800 +vertex_buffer: 2.308000 +vertex_buffer: -0.159513 +vertex_buffer: 5.575268 +vertex_buffer: 0.740100 +vertex_buffer: 0.177600 +vertex_buffer: 1.986000 +vertex_buffer: 0.034865 +vertex_buffer: 5.609689 +vertex_buffer: 0.718400 +vertex_buffer: 0.146300 +vertex_buffer: 1.707000 +vertex_buffer: 0.305890 +vertex_buffer: 5.618190 +vertex_buffer: 0.691800 +vertex_buffer: 0.117600 +vertex_buffer: 1.471000 +vertex_buffer: 0.665717 +vertex_buffer: 5.575468 +vertex_buffer: 0.659200 +vertex_buffer: 0.090100 +vertex_buffer: 1.264000 +vertex_buffer: 1.088916 +vertex_buffer: 5.518081 +vertex_buffer: 0.620700 +vertex_buffer: 0.065000 +vertex_buffer: 0.847000 +vertex_buffer: 2.170980 +vertex_buffer: 5.465208 +vertex_buffer: 0.515400 +vertex_buffer: 0.027100 +vertex_buffer: 0.922999 +vertex_buffer: 1.947676 +vertex_buffer: 5.467464 +vertex_buffer: 0.538400 +vertex_buffer: 0.032000 +vertex_buffer: 0.807999 +vertex_buffer: 2.157453 +vertex_buffer: 5.482841 +vertex_buffer: 0.514700 +vertex_buffer: 0.030900 +vertex_buffer: 0.796000 +vertex_buffer: 2.333566 +vertex_buffer: 5.498341 +vertex_buffer: 0.497900 +vertex_buffer: 0.026800 +vertex_buffer: 0.719000 +vertex_buffer: 
2.548583 +vertex_buffer: 5.588669 +vertex_buffer: 0.474800 +vertex_buffer: 0.031000 +vertex_buffer: 0.631000 +vertex_buffer: 2.767489 +vertex_buffer: 5.684450 +vertex_buffer: 0.452100 +vertex_buffer: 0.036800 +vertex_buffer: 0.313999 +vertex_buffer: 3.115831 +vertex_buffer: 5.733213 +vertex_buffer: 0.412000 +vertex_buffer: 0.031500 +vertex_buffer: 0.450000 +vertex_buffer: 3.063987 +vertex_buffer: 5.729444 +vertex_buffer: 0.423000 +vertex_buffer: 0.035000 +vertex_buffer: 0.182000 +vertex_buffer: 3.129776 +vertex_buffer: 5.735759 +vertex_buffer: 0.401700 +vertex_buffer: 0.027400 +vertex_buffer: 0.160685 +vertex_buffer: 3.986409 +vertex_buffer: 5.740629 +vertex_buffer: 0.284300 +vertex_buffer: 0.106000 +vertex_buffer: -0.000000 +vertex_buffer: 3.978112 +vertex_buffer: 5.743165 +vertex_buffer: 0.279500 +vertex_buffer: 0.092500 +vertex_buffer: 0.557000 +vertex_buffer: 4.010119 +vertex_buffer: 5.737586 +vertex_buffer: 0.295500 +vertex_buffer: 0.139600 +vertex_buffer: 0.309999 +vertex_buffer: 3.994118 +vertex_buffer: 5.738694 +vertex_buffer: 0.288800 +vertex_buffer: 0.118600 +vertex_buffer: 0.884999 +vertex_buffer: 4.051117 +vertex_buffer: 5.738481 +vertex_buffer: 0.303600 +vertex_buffer: 0.168000 +vertex_buffer: 1.100000 +vertex_buffer: 4.085115 +vertex_buffer: 5.740980 +vertex_buffer: 0.308100 +vertex_buffer: 0.186900 +vertex_buffer: 1.408000 +vertex_buffer: 4.138110 +vertex_buffer: 5.742989 +vertex_buffer: 0.313100 +vertex_buffer: 0.214600 +vertex_buffer: 1.825000 +vertex_buffer: 4.212113 +vertex_buffer: 5.742072 +vertex_buffer: 0.317400 +vertex_buffer: 0.252500 +vertex_buffer: 2.364000 +vertex_buffer: 4.287131 +vertex_buffer: 5.725549 +vertex_buffer: 0.321000 +vertex_buffer: 0.301800 +vertex_buffer: 3.039000 +vertex_buffer: 4.343173 +vertex_buffer: 5.693288 +vertex_buffer: 0.325800 +vertex_buffer: 0.363200 +vertex_buffer: 3.712000 +vertex_buffer: 4.374225 +vertex_buffer: 5.649136 +vertex_buffer: 0.332200 +vertex_buffer: 0.424300 +vertex_buffer: 4.241000 +vertex_buffer: 4.376288 +vertex_buffer: 5.597692 +vertex_buffer: 0.338400 +vertex_buffer: 0.472600 +vertex_buffer: 7.047000 +vertex_buffer: 3.937823 +vertex_buffer: 5.156792 +vertex_buffer: 0.410100 +vertex_buffer: 0.735500 +vertex_buffer: 6.661000 +vertex_buffer: 4.029736 +vertex_buffer: 5.228822 +vertex_buffer: 0.393600 +vertex_buffer: 0.695500 +vertex_buffer: 6.937000 +vertex_buffer: 2.624609 +vertex_buffer: 5.145157 +vertex_buffer: 0.573600 +vertex_buffer: 0.719000 +vertex_buffer: 0.549000 +vertex_buffer: 2.943340 +vertex_buffer: 5.723495 +vertex_buffer: 0.435100 +vertex_buffer: 0.038000 +vertex_buffer: -0.000000 +vertex_buffer: 3.141714 +vertex_buffer: 5.739735 +vertex_buffer: 0.386900 +vertex_buffer: 0.021100 +vertex_buffer: 7.111000 +vertex_buffer: 3.918920 +vertex_buffer: 5.078127 +vertex_buffer: 0.412900 +vertex_buffer: 0.748000 +vertex_buffer: 7.092000 +vertex_buffer: 3.924858 +vertex_buffer: 5.128843 +vertex_buffer: 0.412000 +vertex_buffer: 0.741800 +vertex_buffer: 7.115000 +vertex_buffer: 3.949918 +vertex_buffer: 5.079048 +vertex_buffer: 0.415800 +vertex_buffer: 0.747800 +vertex_buffer: 7.113000 +vertex_buffer: 3.917983 +vertex_buffer: 5.025514 +vertex_buffer: 0.413300 +vertex_buffer: 0.753900 +vertex_buffer: 7.118000 +vertex_buffer: 3.948984 +vertex_buffer: 5.024453 +vertex_buffer: 0.416100 +vertex_buffer: 0.753900 +vertex_buffer: 7.355000 +vertex_buffer: 3.828099 +vertex_buffer: 4.930316 +vertex_buffer: 0.442100 +vertex_buffer: 0.779000 +vertex_buffer: 7.349000 +vertex_buffer: 3.848097 +vertex_buffer: 4.931756 +vertex_buffer: 
0.437800 +vertex_buffer: 0.777500 +vertex_buffer: 7.351000 +vertex_buffer: 3.790099 +vertex_buffer: 4.929286 +vertex_buffer: 0.447600 +vertex_buffer: 0.780600 +vertex_buffer: 7.307000 +vertex_buffer: 3.363702 +vertex_buffer: 4.924161 +vertex_buffer: 0.498000 +vertex_buffer: 0.779100 +vertex_buffer: 7.330000 +vertex_buffer: 3.384200 +vertex_buffer: 4.920932 +vertex_buffer: 0.493500 +vertex_buffer: 0.779900 +vertex_buffer: 7.257000 +vertex_buffer: 3.334402 +vertex_buffer: 4.931817 +vertex_buffer: 0.504300 +vertex_buffer: 0.777500 +vertex_buffer: 7.342000 +vertex_buffer: 3.491108 +vertex_buffer: 4.922235 +vertex_buffer: 0.479000 +vertex_buffer: 0.781900 +vertex_buffer: 7.344000 +vertex_buffer: 3.549107 +vertex_buffer: 4.923484 +vertex_buffer: 0.472400 +vertex_buffer: 0.782100 +vertex_buffer: 7.116000 +vertex_buffer: 3.915038 +vertex_buffer: 4.979849 +vertex_buffer: 0.414600 +vertex_buffer: 0.760100 +vertex_buffer: 7.120000 +vertex_buffer: 3.945038 +vertex_buffer: 4.979739 +vertex_buffer: 0.416700 +vertex_buffer: 0.758700 +vertex_buffer: 7.137000 +vertex_buffer: 3.943048 +vertex_buffer: 4.972868 +vertex_buffer: 0.418000 +vertex_buffer: 0.760000 +vertex_buffer: 7.136000 +vertex_buffer: 3.910048 +vertex_buffer: 4.971928 +vertex_buffer: 0.416500 +vertex_buffer: 0.762500 +vertex_buffer: 7.168000 +vertex_buffer: 3.936054 +vertex_buffer: 4.966967 +vertex_buffer: 0.420700 +vertex_buffer: 0.762000 +vertex_buffer: 7.167000 +vertex_buffer: 3.903056 +vertex_buffer: 4.966033 +vertex_buffer: 0.419600 +vertex_buffer: 0.764900 +vertex_buffer: 7.339000 +vertex_buffer: 3.410543 +vertex_buffer: 4.919605 +vertex_buffer: 0.489200 +vertex_buffer: 0.780700 +vertex_buffer: 7.340000 +vertex_buffer: 3.445111 +vertex_buffer: 4.921282 +vertex_buffer: 0.484500 +vertex_buffer: 0.781500 +vertex_buffer: 7.081000 +vertex_buffer: 3.225890 +vertex_buffer: 4.958372 +vertex_buffer: 0.523200 +vertex_buffer: 0.771200 +vertex_buffer: 7.329000 +vertex_buffer: 3.862093 +vertex_buffer: 4.935845 +vertex_buffer: 0.433300 +vertex_buffer: 0.775300 +vertex_buffer: 7.348000 +vertex_buffer: 3.655105 +vertex_buffer: 4.925934 +vertex_buffer: 0.461400 +vertex_buffer: 0.781700 +vertex_buffer: 7.226000 +vertex_buffer: 3.234846 +vertex_buffer: 5.228929 +vertex_buffer: 0.510100 +vertex_buffer: 0.744300 +vertex_buffer: 7.266000 +vertex_buffer: 3.262676 +vertex_buffer: 5.181256 +vertex_buffer: 0.506400 +vertex_buffer: 0.749800 +vertex_buffer: 7.182000 +vertex_buffer: 3.205980 +vertex_buffer: 5.273643 +vertex_buffer: 0.513500 +vertex_buffer: 0.738500 +vertex_buffer: 1.118000 +vertex_buffer: 1.338903 +vertex_buffer: 6.002359 +vertex_buffer: 0.575400 +vertex_buffer: 0.093900 +vertex_buffer: 1.142000 +vertex_buffer: 1.341919 +vertex_buffer: 6.002557 +vertex_buffer: 0.575400 +vertex_buffer: 0.095400 +vertex_buffer: 1.103000 +vertex_buffer: 1.337232 +vertex_buffer: 5.996234 +vertex_buffer: 0.575600 +vertex_buffer: 0.093100 +vertex_buffer: 0.806000 +vertex_buffer: 2.138176 +vertex_buffer: 5.548845 +vertex_buffer: 0.514500 +vertex_buffer: 0.036600 +vertex_buffer: 1.367000 +vertex_buffer: 1.388032 +vertex_buffer: 5.704676 +vertex_buffer: 0.568800 +vertex_buffer: 0.139600 +vertex_buffer: 1.098000 +vertex_buffer: 1.337325 +vertex_buffer: 5.977184 +vertex_buffer: 0.576200 +vertex_buffer: 0.092200 +vertex_buffer: 1.332000 +vertex_buffer: 1.365340 +vertex_buffer: 5.998475 +vertex_buffer: 0.575300 +vertex_buffer: 0.112000 +vertex_buffer: 1.071000 +vertex_buffer: 1.551685 +vertex_buffer: 5.483383 +vertex_buffer: 0.577000 +vertex_buffer: 0.044800 +vertex_buffer: 
7.168000 +vertex_buffer: 3.279676 +vertex_buffer: 4.945478 +vertex_buffer: 0.514000 +vertex_buffer: 0.774200 +vertex_buffer: 7.141000 +vertex_buffer: 3.958912 +vertex_buffer: 5.084465 +vertex_buffer: 0.418600 +vertex_buffer: 0.748300 +vertex_buffer: 7.142000 +vertex_buffer: 3.957986 +vertex_buffer: 5.022904 +vertex_buffer: 0.418500 +vertex_buffer: 0.754600 +vertex_buffer: 7.112000 +vertex_buffer: 3.917019 +vertex_buffer: 4.995733 +vertex_buffer: 0.413700 +vertex_buffer: 0.757700 +vertex_buffer: 7.116000 +vertex_buffer: 3.949022 +vertex_buffer: 4.993701 +vertex_buffer: 0.416400 +vertex_buffer: 0.757200 +vertex_buffer: 7.141000 +vertex_buffer: 3.958028 +vertex_buffer: 4.989184 +vertex_buffer: 0.418500 +vertex_buffer: 0.758000 +vertex_buffer: 0.323866 +vertex_buffer: 3.364760 +vertex_buffer: 6.099101 +vertex_buffer: 0.376200 +vertex_buffer: 0.082600 +vertex_buffer: 0.573225 +vertex_buffer: 3.334350 +vertex_buffer: 6.083103 +vertex_buffer: 0.387400 +vertex_buffer: 0.102200 +vertex_buffer: 0.167873 +vertex_buffer: 3.366989 +vertex_buffer: 6.104354 +vertex_buffer: 0.368800 +vertex_buffer: 0.070800 +vertex_buffer: -0.000000 +vertex_buffer: 3.369389 +vertex_buffer: 6.110551 +vertex_buffer: 0.360900 +vertex_buffer: 0.058000 +vertex_buffer: -5.567000 +vertex_buffer: 4.268441 +vertex_buffer: 5.472582 +vertex_buffer: 0.370600 +vertex_buffer: 0.590500 +vertex_buffer: -5.128000 +vertex_buffer: 4.330357 +vertex_buffer: 5.541740 +vertex_buffer: 0.361200 +vertex_buffer: 0.550600 +vertex_buffer: -5.129000 +vertex_buffer: 4.348364 +vertex_buffer: 5.535068 +vertex_buffer: 0.359600 +vertex_buffer: 0.551300 +vertex_buffer: -5.571000 +vertex_buffer: 4.286449 +vertex_buffer: 5.464946 +vertex_buffer: 0.369000 +vertex_buffer: 0.591400 +vertex_buffer: -5.541000 +vertex_buffer: 4.185438 +vertex_buffer: 5.475054 +vertex_buffer: 0.377700 +vertex_buffer: 0.587200 +vertex_buffer: -5.118000 +vertex_buffer: 4.243356 +vertex_buffer: 5.541220 +vertex_buffer: 0.368700 +vertex_buffer: 0.548600 +vertex_buffer: -4.708000 +vertex_buffer: 4.367285 +vertex_buffer: 5.600235 +vertex_buffer: 0.353600 +vertex_buffer: 0.512400 +vertex_buffer: -4.708000 +vertex_buffer: 4.384293 +vertex_buffer: 5.593466 +vertex_buffer: 0.352000 +vertex_buffer: 0.513000 +vertex_buffer: -4.253000 +vertex_buffer: 4.387217 +vertex_buffer: 5.656703 +vertex_buffer: 0.346800 +vertex_buffer: 0.471000 +vertex_buffer: -4.253000 +vertex_buffer: 4.405225 +vertex_buffer: 5.649831 +vertex_buffer: 0.345200 +vertex_buffer: 0.471700 +vertex_buffer: -4.704000 +vertex_buffer: 4.280284 +vertex_buffer: 5.600062 +vertex_buffer: 0.361200 +vertex_buffer: 0.510800 +vertex_buffer: -4.251000 +vertex_buffer: 4.301218 +vertex_buffer: 5.656061 +vertex_buffer: 0.354400 +vertex_buffer: 0.469500 +vertex_buffer: -3.713000 +vertex_buffer: 4.386153 +vertex_buffer: 5.709153 +vertex_buffer: 0.340400 +vertex_buffer: 0.422100 +vertex_buffer: -3.712000 +vertex_buffer: 4.403163 +vertex_buffer: 5.701223 +vertex_buffer: 0.338700 +vertex_buffer: 0.422600 +vertex_buffer: -3.035000 +vertex_buffer: 4.357099 +vertex_buffer: 5.754267 +vertex_buffer: 0.334300 +vertex_buffer: 0.360700 +vertex_buffer: -3.035000 +vertex_buffer: 4.374107 +vertex_buffer: 5.746506 +vertex_buffer: 0.332700 +vertex_buffer: 0.361400 +vertex_buffer: -3.711000 +vertex_buffer: 4.300154 +vertex_buffer: 5.708760 +vertex_buffer: 0.348000 +vertex_buffer: 0.420400 +vertex_buffer: -3.034000 +vertex_buffer: 4.271099 +vertex_buffer: 5.753122 +vertex_buffer: 0.341900 +vertex_buffer: 0.358900 +vertex_buffer: -2.359000 +vertex_buffer: 4.303060 
+vertex_buffer: 5.785590 +vertex_buffer: 0.329900 +vertex_buffer: 0.299400 +vertex_buffer: -2.358000 +vertex_buffer: 4.321068 +vertex_buffer: 5.778964 +vertex_buffer: 0.328100 +vertex_buffer: 0.300100 +vertex_buffer: -1.823000 +vertex_buffer: 4.228040 +vertex_buffer: 5.802235 +vertex_buffer: 0.326400 +vertex_buffer: 0.250600 +vertex_buffer: -1.820000 +vertex_buffer: 4.245048 +vertex_buffer: 5.795635 +vertex_buffer: 0.324600 +vertex_buffer: 0.251100 +vertex_buffer: -2.362000 +vertex_buffer: 4.217062 +vertex_buffer: 5.783886 +vertex_buffer: 0.337600 +vertex_buffer: 0.297700 +vertex_buffer: -1.836000 +vertex_buffer: 4.142041 +vertex_buffer: 5.800324 +vertex_buffer: 0.334400 +vertex_buffer: 0.249400 +vertex_buffer: -1.410000 +vertex_buffer: 4.152036 +vertex_buffer: 5.806158 +vertex_buffer: 0.322400 +vertex_buffer: 0.212900 +vertex_buffer: -1.405000 +vertex_buffer: 4.170044 +vertex_buffer: 5.799617 +vertex_buffer: 0.320500 +vertex_buffer: 0.213200 +vertex_buffer: -5.819000 +vertex_buffer: 3.920209 +vertex_buffer: 5.663535 +vertex_buffer: 0.421100 +vertex_buffer: 0.602200 +vertex_buffer: -5.486000 +vertex_buffer: 3.986141 +vertex_buffer: 5.718381 +vertex_buffer: 0.412000 +vertex_buffer: 0.572000 +vertex_buffer: -5.551000 +vertex_buffer: 4.106152 +vertex_buffer: 5.709693 +vertex_buffer: 0.402100 +vertex_buffer: 0.579100 +vertex_buffer: -6.010000 +vertex_buffer: 4.027245 +vertex_buffer: 5.632739 +vertex_buffer: 0.413400 +vertex_buffer: 0.620600 +vertex_buffer: -6.101000 +vertex_buffer: 3.839266 +vertex_buffer: 5.615214 +vertex_buffer: 0.431300 +vertex_buffer: 0.627700 +vertex_buffer: -6.386000 +vertex_buffer: 3.956323 +vertex_buffer: 5.569576 +vertex_buffer: 0.423500 +vertex_buffer: 0.654600 +vertex_buffer: -6.533000 +vertex_buffer: 3.412464 +vertex_buffer: 5.527380 +vertex_buffer: 0.474800 +vertex_buffer: 0.665700 +vertex_buffer: -6.428000 +vertex_buffer: 3.592345 +vertex_buffer: 5.550429 +vertex_buffer: 0.457600 +vertex_buffer: 0.656300 +vertex_buffer: -6.627000 +vertex_buffer: 3.198302 +vertex_buffer: 5.510076 +vertex_buffer: 0.495800 +vertex_buffer: 0.673400 +vertex_buffer: -6.595000 +vertex_buffer: 3.198286 +vertex_buffer: 5.512281 +vertex_buffer: 0.494800 +vertex_buffer: 0.670900 +vertex_buffer: -6.619000 +vertex_buffer: 2.959405 +vertex_buffer: 5.501948 +vertex_buffer: 0.516700 +vertex_buffer: 0.670900 +vertex_buffer: -6.652000 +vertex_buffer: 2.957454 +vertex_buffer: 5.499708 +vertex_buffer: 0.517700 +vertex_buffer: 0.673200 +vertex_buffer: -6.653000 +vertex_buffer: 2.716597 +vertex_buffer: 5.493766 +vertex_buffer: 0.538700 +vertex_buffer: 0.668700 +vertex_buffer: -6.617000 +vertex_buffer: 2.717522 +vertex_buffer: 5.497011 +vertex_buffer: 0.537900 +vertex_buffer: 0.666200 +vertex_buffer: -6.420000 +vertex_buffer: 1.682484 +vertex_buffer: 5.510983 +vertex_buffer: 0.615100 +vertex_buffer: 0.613800 +vertex_buffer: -6.383000 +vertex_buffer: 1.687333 +vertex_buffer: 5.513467 +vertex_buffer: 0.614200 +vertex_buffer: 0.610700 +vertex_buffer: -6.539000 +vertex_buffer: 2.186655 +vertex_buffer: 5.498873 +vertex_buffer: 0.579700 +vertex_buffer: 0.642800 +vertex_buffer: -6.578000 +vertex_buffer: 2.182802 +vertex_buffer: 5.495497 +vertex_buffer: 0.580600 +vertex_buffer: 0.645900 +vertex_buffer: -6.214000 +vertex_buffer: 1.153944 +vertex_buffer: 5.529571 +vertex_buffer: 0.649300 +vertex_buffer: 0.575600 +vertex_buffer: -6.180000 +vertex_buffer: 1.161807 +vertex_buffer: 5.531222 +vertex_buffer: 0.648100 +vertex_buffer: 0.572700 +vertex_buffer: -6.006000 +vertex_buffer: 0.759837 +vertex_buffer: 5.546113 
+vertex_buffer: 0.674000 +vertex_buffer: 0.543200 +vertex_buffer: -5.976000 +vertex_buffer: 0.774659 +vertex_buffer: 5.548143 +vertex_buffer: 0.672000 +vertex_buffer: 0.540800 +vertex_buffer: -5.738000 +vertex_buffer: 0.483219 +vertex_buffer: 5.569363 +vertex_buffer: 0.689400 +vertex_buffer: 0.510800 +vertex_buffer: -5.765000 +vertex_buffer: 0.459438 +vertex_buffer: 5.566656 +vertex_buffer: 0.692000 +vertex_buffer: 0.512700 +vertex_buffer: -5.435000 +vertex_buffer: 0.241572 +vertex_buffer: 5.597222 +vertex_buffer: 0.703400 +vertex_buffer: 0.477900 +vertex_buffer: -5.459000 +vertex_buffer: 0.213714 +vertex_buffer: 5.594969 +vertex_buffer: 0.706300 +vertex_buffer: 0.479400 +vertex_buffer: -5.043000 +vertex_buffer: 0.047655 +vertex_buffer: 5.633954 +vertex_buffer: 0.714700 +vertex_buffer: 0.439400 +vertex_buffer: -5.060000 +vertex_buffer: 0.015697 +vertex_buffer: 5.632053 +vertex_buffer: 0.717800 +vertex_buffer: 0.440200 +vertex_buffer: -4.534000 +vertex_buffer: -0.103409 +vertex_buffer: 5.679254 +vertex_buffer: 0.723700 +vertex_buffer: 0.391800 +vertex_buffer: -4.543000 +vertex_buffer: -0.140581 +vertex_buffer: 5.678630 +vertex_buffer: 0.726900 +vertex_buffer: 0.391800 +vertex_buffer: -4.005000 +vertex_buffer: -0.195870 +vertex_buffer: 5.728422 +vertex_buffer: 0.728400 +vertex_buffer: 0.343300 +vertex_buffer: -4.007000 +vertex_buffer: -0.236085 +vertex_buffer: 5.727403 +vertex_buffer: 0.731900 +vertex_buffer: 0.342800 +vertex_buffer: -1.098000 +vertex_buffer: 4.116043 +vertex_buffer: 5.800600 +vertex_buffer: 0.315600 +vertex_buffer: 0.185500 +vertex_buffer: -1.103000 +vertex_buffer: 4.098034 +vertex_buffer: 5.808094 +vertex_buffer: 0.317600 +vertex_buffer: 0.185200 +vertex_buffer: -1.432000 +vertex_buffer: 4.066038 +vertex_buffer: 5.804105 +vertex_buffer: 0.330600 +vertex_buffer: 0.212100 +vertex_buffer: -1.126000 +vertex_buffer: 4.010036 +vertex_buffer: 5.805807 +vertex_buffer: 0.325900 +vertex_buffer: 0.184200 +vertex_buffer: -0.883999 +vertex_buffer: 4.081041 +vertex_buffer: 5.802060 +vertex_buffer: 0.311200 +vertex_buffer: 0.166300 +vertex_buffer: -0.887998 +vertex_buffer: 4.062030 +vertex_buffer: 5.809504 +vertex_buffer: 0.313200 +vertex_buffer: 0.166000 +vertex_buffer: -0.162758 +vertex_buffer: 3.804688 +vertex_buffer: 6.122684 +vertex_buffer: 0.333600 +vertex_buffer: 0.088700 +vertex_buffer: -0.165816 +vertex_buffer: 3.544578 +vertex_buffer: 6.111334 +vertex_buffer: 0.354600 +vertex_buffer: 0.078000 +vertex_buffer: -0.584504 +vertex_buffer: 3.526286 +vertex_buffer: 6.088815 +vertex_buffer: 0.371900 +vertex_buffer: 0.112100 +vertex_buffer: -0.319898 +vertex_buffer: 3.542312 +vertex_buffer: 6.106232 +vertex_buffer: 0.361400 +vertex_buffer: 0.090000 +vertex_buffer: -0.313999 +vertex_buffer: 3.805656 +vertex_buffer: 6.117985 +vertex_buffer: 0.339400 +vertex_buffer: 0.100800 +vertex_buffer: -0.582000 +vertex_buffer: 3.814673 +vertex_buffer: 6.103284 +vertex_buffer: 0.347300 +vertex_buffer: 0.124400 +vertex_buffer: -0.547999 +vertex_buffer: 2.930367 +vertex_buffer: 6.069046 +vertex_buffer: 0.423500 +vertex_buffer: 0.076700 +vertex_buffer: -0.522999 +vertex_buffer: 2.920398 +vertex_buffer: 6.062136 +vertex_buffer: 0.424300 +vertex_buffer: 0.072800 +vertex_buffer: -0.432000 +vertex_buffer: 3.031033 +vertex_buffer: 6.071945 +vertex_buffer: 0.411000 +vertex_buffer: 0.070300 +vertex_buffer: -0.452999 +vertex_buffer: 3.052091 +vertex_buffer: 6.080023 +vertex_buffer: 0.409300 +vertex_buffer: 0.074700 +vertex_buffer: -0.591999 +vertex_buffer: 2.935434 +vertex_buffer: 6.066682 +vertex_buffer: 
0.423800 +vertex_buffer: 0.082000 +vertex_buffer: -0.500000 +vertex_buffer: 3.112521 +vertex_buffer: 6.080370 +vertex_buffer: 0.405500 +vertex_buffer: 0.082900 +vertex_buffer: -0.664999 +vertex_buffer: 2.741312 +vertex_buffer: 6.054260 +vertex_buffer: 0.442900 +vertex_buffer: 0.080800 +vertex_buffer: -0.625000 +vertex_buffer: 2.745313 +vertex_buffer: 6.055617 +vertex_buffer: 0.442100 +vertex_buffer: 0.076800 +vertex_buffer: -0.600999 +vertex_buffer: 2.743412 +vertex_buffer: 6.049767 +vertex_buffer: 0.442400 +vertex_buffer: 0.073800 +vertex_buffer: -0.797000 +vertex_buffer: 2.211378 +vertex_buffer: 6.028312 +vertex_buffer: 0.493200 +vertex_buffer: 0.078100 +vertex_buffer: -0.889000 +vertex_buffer: 1.937704 +vertex_buffer: 6.020942 +vertex_buffer: 0.519200 +vertex_buffer: 0.081200 +vertex_buffer: -0.872000 +vertex_buffer: 1.935933 +vertex_buffer: 6.014896 +vertex_buffer: 0.519600 +vertex_buffer: 0.080100 +vertex_buffer: -0.777999 +vertex_buffer: 2.209563 +vertex_buffer: 6.022314 +vertex_buffer: 0.493700 +vertex_buffer: 0.076500 +vertex_buffer: -0.825999 +vertex_buffer: 2.213393 +vertex_buffer: 6.028286 +vertex_buffer: 0.493200 +vertex_buffer: 0.080600 +vertex_buffer: -0.915999 +vertex_buffer: 1.941734 +vertex_buffer: 6.021025 +vertex_buffer: 0.519000 +vertex_buffer: 0.083200 +vertex_buffer: -1.019000 +vertex_buffer: 1.656937 +vertex_buffer: 6.012816 +vertex_buffer: 0.546000 +vertex_buffer: 0.088000 +vertex_buffer: -0.994000 +vertex_buffer: 1.653878 +vertex_buffer: 6.013682 +vertex_buffer: 0.546000 +vertex_buffer: 0.086400 +vertex_buffer: -0.978000 +vertex_buffer: 1.652161 +vertex_buffer: 6.007595 +vertex_buffer: 0.546300 +vertex_buffer: 0.085500 +vertex_buffer: -1.259000 +vertex_buffer: 0.989106 +vertex_buffer: 5.981474 +vertex_buffer: 0.607700 +vertex_buffer: 0.104000 +vertex_buffer: -1.273000 +vertex_buffer: 0.991726 +vertex_buffer: 5.987666 +vertex_buffer: 0.607500 +vertex_buffer: 0.104800 +vertex_buffer: -1.470000 +vertex_buffer: 0.609648 +vertex_buffer: 5.964523 +vertex_buffer: 0.642600 +vertex_buffer: 0.119900 +vertex_buffer: -1.457000 +vertex_buffer: 0.605091 +vertex_buffer: 5.958188 +vertex_buffer: 0.642900 +vertex_buffer: 0.119100 +vertex_buffer: -1.296000 +vertex_buffer: 0.995807 +vertex_buffer: 5.986959 +vertex_buffer: 0.607500 +vertex_buffer: 0.106300 +vertex_buffer: -1.490000 +vertex_buffer: 0.618772 +vertex_buffer: 5.964130 +vertex_buffer: 0.642200 +vertex_buffer: 0.121200 +vertex_buffer: -1.726000 +vertex_buffer: 0.268501 +vertex_buffer: 5.934266 +vertex_buffer: 0.674500 +vertex_buffer: 0.140600 +vertex_buffer: -1.708000 +vertex_buffer: 0.254355 +vertex_buffer: 5.934185 +vertex_buffer: 0.675200 +vertex_buffer: 0.139400 +vertex_buffer: -1.697000 +vertex_buffer: 0.246832 +vertex_buffer: 5.927583 +vertex_buffer: 0.675700 +vertex_buffer: 0.138700 +vertex_buffer: -1.990000 +vertex_buffer: -0.014744 +vertex_buffer: 5.901462 +vertex_buffer: 0.700200 +vertex_buffer: 0.163400 +vertex_buffer: -1.981000 +vertex_buffer: -0.024212 +vertex_buffer: 5.894596 +vertex_buffer: 0.700700 +vertex_buffer: 0.162700 +vertex_buffer: -2.005000 +vertex_buffer: 0.001400 +vertex_buffer: 5.901891 +vertex_buffer: 0.699300 +vertex_buffer: 0.164500 +vertex_buffer: -2.328000 +vertex_buffer: -0.193438 +vertex_buffer: 5.866342 +vertex_buffer: 0.718200 +vertex_buffer: 0.192400 +vertex_buffer: -2.316000 +vertex_buffer: -0.211562 +vertex_buffer: 5.865541 +vertex_buffer: 0.719200 +vertex_buffer: 0.191400 +vertex_buffer: -2.308000 +vertex_buffer: -0.221987 +vertex_buffer: 5.858471 +vertex_buffer: 0.719700 
+vertex_buffer: 0.190800 +vertex_buffer: -2.686000 +vertex_buffer: -0.346309 +vertex_buffer: 5.827715 +vertex_buffer: 0.733700 +vertex_buffer: 0.223400 +vertex_buffer: -2.680000 +vertex_buffer: -0.359701 +vertex_buffer: 5.820293 +vertex_buffer: 0.734400 +vertex_buffer: 0.222800 +vertex_buffer: -2.695000 +vertex_buffer: -0.325289 +vertex_buffer: 5.829885 +vertex_buffer: 0.732400 +vertex_buffer: 0.224300 +vertex_buffer: -3.097000 +vertex_buffer: -0.408205 +vertex_buffer: 5.791085 +vertex_buffer: 0.742400 +vertex_buffer: 0.259800 +vertex_buffer: -3.091000 +vertex_buffer: -0.431304 +vertex_buffer: 5.789650 +vertex_buffer: 0.744000 +vertex_buffer: 0.258900 +vertex_buffer: -3.087000 +vertex_buffer: -0.445664 +vertex_buffer: 5.782089 +vertex_buffer: 0.744900 +vertex_buffer: 0.258300 +vertex_buffer: -3.522000 +vertex_buffer: -0.475451 +vertex_buffer: 5.748738 +vertex_buffer: 0.750200 +vertex_buffer: 0.297200 +vertex_buffer: -3.520000 +vertex_buffer: -0.489680 +vertex_buffer: 5.740166 +vertex_buffer: 0.751200 +vertex_buffer: 0.296700 +vertex_buffer: -3.525000 +vertex_buffer: -0.450484 +vertex_buffer: 5.751408 +vertex_buffer: 0.748300 +vertex_buffer: 0.297900 +vertex_buffer: -4.017000 +vertex_buffer: -0.437417 +vertex_buffer: 5.704529 +vertex_buffer: 0.749800 +vertex_buffer: 0.342100 +vertex_buffer: -4.018000 +vertex_buffer: -0.462352 +vertex_buffer: 5.701867 +vertex_buffer: 0.751900 +vertex_buffer: 0.341500 +vertex_buffer: -4.018000 +vertex_buffer: -0.477554 +vertex_buffer: 5.693193 +vertex_buffer: 0.753200 +vertex_buffer: 0.341100 +vertex_buffer: -5.141000 +vertex_buffer: 4.193074 +vertex_buffer: 5.774485 +vertex_buffer: 0.390900 +vertex_buffer: 0.542500 +vertex_buffer: -5.143000 +vertex_buffer: 4.212080 +vertex_buffer: 5.768786 +vertex_buffer: 0.389300 +vertex_buffer: 0.543200 +vertex_buffer: -5.564000 +vertex_buffer: 4.155161 +vertex_buffer: 5.702558 +vertex_buffer: 0.398100 +vertex_buffer: 0.581200 +vertex_buffer: -5.562000 +vertex_buffer: 4.136154 +vertex_buffer: 5.709178 +vertex_buffer: 0.399700 +vertex_buffer: 0.580600 +vertex_buffer: -5.138000 +vertex_buffer: 4.162073 +vertex_buffer: 5.774951 +vertex_buffer: 0.393500 +vertex_buffer: 0.541700 +vertex_buffer: -4.727000 +vertex_buffer: 4.199002 +vertex_buffer: 5.833092 +vertex_buffer: 0.386300 +vertex_buffer: 0.504400 +vertex_buffer: -4.728000 +vertex_buffer: 4.230002 +vertex_buffer: 5.833516 +vertex_buffer: 0.383700 +vertex_buffer: 0.505100 +vertex_buffer: -4.728000 +vertex_buffer: 4.249009 +vertex_buffer: 5.826763 +vertex_buffer: 0.382000 +vertex_buffer: 0.505700 +vertex_buffer: -4.274000 +vertex_buffer: 4.250935 +vertex_buffer: 5.889763 +vertex_buffer: 0.377100 +vertex_buffer: 0.464000 +vertex_buffer: -4.274000 +vertex_buffer: 4.269942 +vertex_buffer: 5.883901 +vertex_buffer: 0.375400 +vertex_buffer: 0.464700 +vertex_buffer: -4.274000 +vertex_buffer: 4.219934 +vertex_buffer: 5.889513 +vertex_buffer: 0.379800 +vertex_buffer: 0.463200 +vertex_buffer: -3.731000 +vertex_buffer: 4.250870 +vertex_buffer: 5.942575 +vertex_buffer: 0.370800 +vertex_buffer: 0.414800 +vertex_buffer: -3.731000 +vertex_buffer: 4.219869 +vertex_buffer: 5.942428 +vertex_buffer: 0.373500 +vertex_buffer: 0.414000 +vertex_buffer: -3.731000 +vertex_buffer: 4.268877 +vertex_buffer: 5.936660 +vertex_buffer: 0.369100 +vertex_buffer: 0.415600 +vertex_buffer: -3.051000 +vertex_buffer: 4.221815 +vertex_buffer: 5.986576 +vertex_buffer: 0.365000 +vertex_buffer: 0.353200 +vertex_buffer: -3.050000 +vertex_buffer: 4.239822 +vertex_buffer: 5.981790 +vertex_buffer: 0.363300 
+vertex_buffer: 0.354000 +vertex_buffer: -3.050000 +vertex_buffer: 4.189816 +vertex_buffer: 5.986144 +vertex_buffer: 0.367800 +vertex_buffer: 0.352100 +vertex_buffer: -2.377000 +vertex_buffer: 4.135779 +vertex_buffer: 6.017364 +vertex_buffer: 0.363900 +vertex_buffer: 0.290700 +vertex_buffer: -2.376000 +vertex_buffer: 4.167778 +vertex_buffer: 6.017976 +vertex_buffer: 0.361000 +vertex_buffer: 0.291700 +vertex_buffer: -2.376000 +vertex_buffer: 4.185784 +vertex_buffer: 6.013332 +vertex_buffer: 0.359300 +vertex_buffer: 0.292700 +vertex_buffer: -1.854000 +vertex_buffer: 4.091757 +vertex_buffer: 6.035276 +vertex_buffer: 0.359300 +vertex_buffer: 0.243600 +vertex_buffer: -1.851000 +vertex_buffer: 4.110763 +vertex_buffer: 6.029667 +vertex_buffer: 0.357100 +vertex_buffer: 0.244500 +vertex_buffer: -1.858000 +vertex_buffer: 4.060758 +vertex_buffer: 6.033613 +vertex_buffer: 0.362400 +vertex_buffer: 0.242700 +vertex_buffer: -1.464000 +vertex_buffer: 3.979738 +vertex_buffer: 6.050185 +vertex_buffer: 0.361500 +vertex_buffer: 0.205700 +vertex_buffer: -1.454000 +vertex_buffer: 4.014737 +vertex_buffer: 6.051946 +vertex_buffer: 0.357500 +vertex_buffer: 0.206300 +vertex_buffer: -1.448000 +vertex_buffer: 4.033742 +vertex_buffer: 6.046346 +vertex_buffer: 0.354900 +vertex_buffer: 0.207000 +vertex_buffer: -6.022000 +vertex_buffer: 4.101534 +vertex_buffer: 5.394998 +vertex_buffer: 0.389000 +vertex_buffer: 0.630700 +vertex_buffer: -6.013000 +vertex_buffer: 4.086517 +vertex_buffer: 5.408722 +vertex_buffer: 0.390600 +vertex_buffer: 0.629300 +vertex_buffer: -5.538000 +vertex_buffer: 4.167421 +vertex_buffer: 5.488696 +vertex_buffer: 0.379500 +vertex_buffer: 0.586200 +vertex_buffer: -6.077000 +vertex_buffer: 4.173544 +vertex_buffer: 5.387243 +vertex_buffer: 0.383200 +vertex_buffer: 0.636700 +vertex_buffer: -6.412000 +vertex_buffer: 4.027614 +vertex_buffer: 5.329050 +vertex_buffer: 0.398800 +vertex_buffer: 0.666000 +vertex_buffer: -6.396000 +vertex_buffer: 4.014595 +vertex_buffer: 5.344789 +vertex_buffer: 0.400200 +vertex_buffer: 0.663800 +vertex_buffer: -6.603000 +vertex_buffer: 3.981370 +vertex_buffer: 5.529604 +vertex_buffer: 0.423400 +vertex_buffer: 0.676200 +vertex_buffer: -6.636000 +vertex_buffer: 3.990376 +vertex_buffer: 5.524715 +vertex_buffer: 0.423500 +vertex_buffer: 0.679800 +vertex_buffer: -6.648000 +vertex_buffer: 3.980372 +vertex_buffer: 5.528382 +vertex_buffer: 0.425200 +vertex_buffer: 0.680600 +vertex_buffer: -6.609000 +vertex_buffer: 3.966365 +vertex_buffer: 5.534145 +vertex_buffer: 0.425500 +vertex_buffer: 0.676500 +vertex_buffer: -6.691000 +vertex_buffer: 4.014387 +vertex_buffer: 5.516231 +vertex_buffer: 0.423000 +vertex_buffer: 0.685400 +vertex_buffer: -6.709000 +vertex_buffer: 4.010384 +vertex_buffer: 5.519073 +vertex_buffer: 0.424100 +vertex_buffer: 0.686700 +vertex_buffer: -6.694000 +vertex_buffer: 3.950383 +vertex_buffer: 5.519313 +vertex_buffer: 0.429300 +vertex_buffer: 0.684400 +vertex_buffer: -1.149000 +vertex_buffer: 3.955715 +vertex_buffer: 6.069484 +vertex_buffer: 0.354100 +vertex_buffer: 0.177900 +vertex_buffer: -1.141000 +vertex_buffer: 3.976721 +vertex_buffer: 6.063966 +vertex_buffer: 0.351300 +vertex_buffer: 0.178400 +vertex_buffer: -1.165000 +vertex_buffer: 3.907718 +vertex_buffer: 6.067353 +vertex_buffer: 0.359400 +vertex_buffer: 0.177300 +vertex_buffer: -0.929000 +vertex_buffer: 3.915698 +vertex_buffer: 6.082634 +vertex_buffer: 0.350200 +vertex_buffer: 0.157500 +vertex_buffer: -0.948999 +vertex_buffer: 3.854702 +vertex_buffer: 6.080110 +vertex_buffer: 0.356600 +vertex_buffer: 0.156800 
+vertex_buffer: -0.921000
+vertex_buffer: 3.939706
+vertex_buffer: 6.077227
+vertex_buffer: 0.347300
+vertex_buffer: 0.158000
[… remaining vertex_buffer entries elided — the added file continues with thousands of such float values, five per vertex: an x, y, z position followed by a u, v texture coordinate …]
+vertex_buffer: 3.155063 +vertex_buffer: 5.125915 +vertex_buffer: 0.531800 +vertex_buffer: 0.752500 +vertex_buffer: -7.401000 +vertex_buffer: 3.688052 +vertex_buffer: 4.968018 +vertex_buffer: 0.459500 +vertex_buffer: 0.774100 +vertex_buffer: -7.409000 +vertex_buffer: 3.687998 +vertex_buffer: 5.012717 +vertex_buffer: 0.460000 +vertex_buffer: 0.770000 +vertex_buffer: -7.415000 +vertex_buffer: 3.820997 +vertex_buffer: 5.015137 +vertex_buffer: 0.447600 +vertex_buffer: 0.768900 +vertex_buffer: -7.412000 +vertex_buffer: 3.890991 +vertex_buffer: 5.019919 +vertex_buffer: 0.440500 +vertex_buffer: 0.766700 +vertex_buffer: -7.417000 +vertex_buffer: 3.867994 +vertex_buffer: 5.016397 +vertex_buffer: 0.443000 +vertex_buffer: 0.767800 +vertex_buffer: -7.400000 +vertex_buffer: 3.476002 +vertex_buffer: 5.008861 +vertex_buffer: 0.479900 +vertex_buffer: 0.770000 +vertex_buffer: -7.404000 +vertex_buffer: 3.567002 +vertex_buffer: 5.009855 +vertex_buffer: 0.471300 +vertex_buffer: 0.770300 +vertex_buffer: -7.251000 +vertex_buffer: 3.261063 +vertex_buffer: 5.121390 +vertex_buffer: 0.508300 +vertex_buffer: 0.754900 +vertex_buffer: -7.238000 +vertex_buffer: 3.261456 +vertex_buffer: 5.054759 +vertex_buffer: 0.509400 +vertex_buffer: 0.761000 +vertex_buffer: -7.161000 +vertex_buffer: 3.216266 +vertex_buffer: 5.074730 +vertex_buffer: 0.517800 +vertex_buffer: 0.757500 +vertex_buffer: -7.398000 +vertex_buffer: 3.421141 +vertex_buffer: 5.007745 +vertex_buffer: 0.485200 +vertex_buffer: 0.769600 +vertex_buffer: -7.396000 +vertex_buffer: 3.380068 +vertex_buffer: 5.006904 +vertex_buffer: 0.489400 +vertex_buffer: 0.769100 +vertex_buffer: -7.080000 +vertex_buffer: 3.187593 +vertex_buffer: 4.984242 +vertex_buffer: 0.524700 +vertex_buffer: 0.766300 +vertex_buffer: -7.069000 +vertex_buffer: 3.189697 +vertex_buffer: 4.965413 +vertex_buffer: 0.525300 +vertex_buffer: 0.768200 +vertex_buffer: -7.030000 +vertex_buffer: 3.190637 +vertex_buffer: 4.970094 +vertex_buffer: 0.528700 +vertex_buffer: 0.768800 +vertex_buffer: -7.020000 +vertex_buffer: 3.180675 +vertex_buffer: 4.993543 +vertex_buffer: 0.530100 +vertex_buffer: 0.766400 +vertex_buffer: -7.349000 +vertex_buffer: 3.331445 +vertex_buffer: 4.938923 +vertex_buffer: 0.496500 +vertex_buffer: 0.774100 +vertex_buffer: -7.333000 +vertex_buffer: 3.341269 +vertex_buffer: 4.922087 +vertex_buffer: 0.497000 +vertex_buffer: 0.776200 +vertex_buffer: -7.345000 +vertex_buffer: 3.303906 +vertex_buffer: 5.190210 +vertex_buffer: 0.497700 +vertex_buffer: 0.751100 +vertex_buffer: -7.312000 +vertex_buffer: 3.287216 +vertex_buffer: 5.186389 +vertex_buffer: 0.501400 +vertex_buffer: 0.750500 +vertex_buffer: -7.274000 +vertex_buffer: 3.257252 +vertex_buffer: 5.266911 +vertex_buffer: 0.504300 +vertex_buffer: 0.742300 +vertex_buffer: -7.303000 +vertex_buffer: 3.270918 +vertex_buffer: 5.289551 +vertex_buffer: 0.500700 +vertex_buffer: 0.741200 +vertex_buffer: -7.214000 +vertex_buffer: 3.218464 +vertex_buffer: 5.320205 +vertex_buffer: 0.508900 +vertex_buffer: 0.735300 +vertex_buffer: -7.233000 +vertex_buffer: 3.226150 +vertex_buffer: 5.350587 +vertex_buffer: 0.505800 +vertex_buffer: 0.733100 +vertex_buffer: -7.097000 +vertex_buffer: 3.954853 +vertex_buffer: 5.132733 +vertex_buffer: 0.414800 +vertex_buffer: 0.741000 +vertex_buffer: -7.120000 +vertex_buffer: 3.964835 +vertex_buffer: 5.147107 +vertex_buffer: 0.417800 +vertex_buffer: 0.740900 +vertex_buffer: -5.117000 +vertex_buffer: 4.224341 +vertex_buffer: 5.554891 +vertex_buffer: 0.370700 +vertex_buffer: 0.547800 +vertex_buffer: -4.704000 +vertex_buffer: 4.261270 
+vertex_buffer: 5.612811 +vertex_buffer: 0.363200 +vertex_buffer: 0.510100 +vertex_buffer: -4.252000 +vertex_buffer: 4.282202 +vertex_buffer: 5.668910 +vertex_buffer: 0.356400 +vertex_buffer: 0.468800 +vertex_buffer: -3.712000 +vertex_buffer: 4.282137 +vertex_buffer: 5.721675 +vertex_buffer: 0.349900 +vertex_buffer: 0.419600 +vertex_buffer: -3.035000 +vertex_buffer: 4.252083 +vertex_buffer: 5.765869 +vertex_buffer: 0.344000 +vertex_buffer: 0.358000 +vertex_buffer: -2.363000 +vertex_buffer: 4.199045 +vertex_buffer: 5.797508 +vertex_buffer: 0.339700 +vertex_buffer: 0.296700 +vertex_buffer: -1.839000 +vertex_buffer: 4.123028 +vertex_buffer: 5.812879 +vertex_buffer: 0.336700 +vertex_buffer: 0.248500 +vertex_buffer: -1.437000 +vertex_buffer: 4.047021 +vertex_buffer: 5.817625 +vertex_buffer: 0.333000 +vertex_buffer: 0.211500 +vertex_buffer: -1.131000 +vertex_buffer: 3.991018 +vertex_buffer: 5.820283 +vertex_buffer: 0.328400 +vertex_buffer: 0.183700 +vertex_buffer: -7.396000 +vertex_buffer: 3.688087 +vertex_buffer: 4.939241 +vertex_buffer: 0.459100 +vertex_buffer: 0.776700 +vertex_buffer: -7.379000 +vertex_buffer: 3.678107 +vertex_buffer: 4.922937 +vertex_buffer: 0.459600 +vertex_buffer: 0.778900 +vertex_buffer: -6.569000 +vertex_buffer: 3.989616 +vertex_buffer: 5.326786 +vertex_buffer: 0.404700 +vertex_buffer: 0.679400 +vertex_buffer: -6.544000 +vertex_buffer: 3.988624 +vertex_buffer: 5.320908 +vertex_buffer: 0.403700 +vertex_buffer: 0.677300 +vertex_buffer: -4.686000 +vertex_buffer: 4.355355 +vertex_buffer: 5.543365 +vertex_buffer: 0.345100 +vertex_buffer: 0.513400 +vertex_buffer: -5.108000 +vertex_buffer: 4.317427 +vertex_buffer: 5.483809 +vertex_buffer: 0.352600 +vertex_buffer: 0.552200 +vertex_buffer: -5.554000 +vertex_buffer: 4.253511 +vertex_buffer: 5.414423 +vertex_buffer: 0.362300 +vertex_buffer: 0.593200 +vertex_buffer: -6.072000 +vertex_buffer: 4.154618 +vertex_buffer: 5.326802 +vertex_buffer: 0.375300 +vertex_buffer: 0.640700 +vertex_buffer: -6.496000 +vertex_buffer: 4.064704 +vertex_buffer: 5.255655 +vertex_buffer: 0.387600 +vertex_buffer: 0.679600 +vertex_buffer: -6.693000 +vertex_buffer: 4.022743 +vertex_buffer: 5.223533 +vertex_buffer: 0.395400 +vertex_buffer: 0.699800 +vertex_buffer: -6.718000 +vertex_buffer: 4.016747 +vertex_buffer: 5.219308 +vertex_buffer: 0.396900 +vertex_buffer: 0.703600 +vertex_buffer: -6.765000 +vertex_buffer: 4.006758 +vertex_buffer: 5.210963 +vertex_buffer: 0.399300 +vertex_buffer: 0.709400 +vertex_buffer: -7.031000 +vertex_buffer: 3.082283 +vertex_buffer: 5.141156 +vertex_buffer: 0.536800 +vertex_buffer: 0.746500 +vertex_buffer: -6.999000 +vertex_buffer: 2.919289 +vertex_buffer: 5.142432 +vertex_buffer: 0.549900 +vertex_buffer: 0.736300 +vertex_buffer: -6.830000 +vertex_buffer: 2.206663 +vertex_buffer: 5.154284 +vertex_buffer: 0.606600 +vertex_buffer: 0.693700 +vertex_buffer: -6.668000 +vertex_buffer: 1.672600 +vertex_buffer: 5.170115 +vertex_buffer: 0.646300 +vertex_buffer: 0.658100 +vertex_buffer: -6.462000 +vertex_buffer: 1.127224 +vertex_buffer: 5.188234 +vertex_buffer: 0.684400 +vertex_buffer: 0.617400 +vertex_buffer: -6.223000 +vertex_buffer: 0.676129 +vertex_buffer: 5.207506 +vertex_buffer: 0.714300 +vertex_buffer: 0.578200 +vertex_buffer: -5.940000 +vertex_buffer: 0.312489 +vertex_buffer: 5.231991 +vertex_buffer: 0.737300 +vertex_buffer: 0.539500 +vertex_buffer: -5.597000 +vertex_buffer: 0.033719 +vertex_buffer: 5.261748 +vertex_buffer: 0.754700 +vertex_buffer: 0.499600 +vertex_buffer: -5.158000 +vertex_buffer: -0.173292 +vertex_buffer: 5.302549 
+vertex_buffer: 0.767700 +vertex_buffer: 0.454100 +vertex_buffer: -4.582000 +vertex_buffer: -0.319409 +vertex_buffer: 5.355970 +vertex_buffer: 0.776700 +vertex_buffer: 0.397800 +vertex_buffer: -3.990000 +vertex_buffer: -0.400474 +vertex_buffer: 5.413808 +vertex_buffer: 0.779800 +vertex_buffer: 0.340600 +vertex_buffer: -3.501000 +vertex_buffer: -0.414360 +vertex_buffer: 5.459591 +vertex_buffer: 0.777100 +vertex_buffer: 0.293000 +vertex_buffer: -3.075000 +vertex_buffer: -0.373539 +vertex_buffer: 5.500115 +vertex_buffer: 0.769700 +vertex_buffer: 0.251500 +vertex_buffer: -2.674000 +vertex_buffer: -0.292200 +vertex_buffer: 5.537738 +vertex_buffer: 0.757200 +vertex_buffer: 0.212800 +vertex_buffer: -2.308000 +vertex_buffer: -0.159513 +vertex_buffer: 5.575268 +vertex_buffer: 0.740100 +vertex_buffer: 0.177600 +vertex_buffer: -1.986000 +vertex_buffer: 0.034865 +vertex_buffer: 5.609689 +vertex_buffer: 0.718400 +vertex_buffer: 0.146300 +vertex_buffer: -1.707000 +vertex_buffer: 0.305900 +vertex_buffer: 5.618189 +vertex_buffer: 0.691800 +vertex_buffer: 0.117600 +vertex_buffer: -1.471000 +vertex_buffer: 0.665717 +vertex_buffer: 5.575468 +vertex_buffer: 0.659200 +vertex_buffer: 0.090100 +vertex_buffer: -1.264000 +vertex_buffer: 1.088916 +vertex_buffer: 5.518081 +vertex_buffer: 0.620700 +vertex_buffer: 0.065000 +vertex_buffer: -0.922999 +vertex_buffer: 1.947676 +vertex_buffer: 5.467464 +vertex_buffer: 0.538400 +vertex_buffer: 0.032000 +vertex_buffer: -0.847000 +vertex_buffer: 2.170980 +vertex_buffer: 5.465208 +vertex_buffer: 0.515400 +vertex_buffer: 0.027100 +vertex_buffer: -0.807999 +vertex_buffer: 2.157453 +vertex_buffer: 5.482841 +vertex_buffer: 0.514700 +vertex_buffer: 0.030900 +vertex_buffer: -0.796000 +vertex_buffer: 2.333566 +vertex_buffer: 5.498341 +vertex_buffer: 0.497900 +vertex_buffer: 0.026800 +vertex_buffer: -0.719000 +vertex_buffer: 2.548583 +vertex_buffer: 5.588669 +vertex_buffer: 0.474800 +vertex_buffer: 0.031000 +vertex_buffer: -0.631000 +vertex_buffer: 2.767489 +vertex_buffer: 5.684450 +vertex_buffer: 0.452100 +vertex_buffer: 0.036800 +vertex_buffer: -0.313999 +vertex_buffer: 3.115831 +vertex_buffer: 5.733213 +vertex_buffer: 0.412000 +vertex_buffer: 0.031500 +vertex_buffer: -0.450000 +vertex_buffer: 3.063987 +vertex_buffer: 5.729444 +vertex_buffer: 0.423000 +vertex_buffer: 0.035000 +vertex_buffer: -0.182000 +vertex_buffer: 3.129776 +vertex_buffer: 5.735759 +vertex_buffer: 0.401700 +vertex_buffer: 0.027400 +vertex_buffer: -0.160685 +vertex_buffer: 3.986409 +vertex_buffer: 5.740629 +vertex_buffer: 0.284300 +vertex_buffer: 0.106000 +vertex_buffer: -0.309999 +vertex_buffer: 3.994118 +vertex_buffer: 5.738694 +vertex_buffer: 0.288800 +vertex_buffer: 0.118600 +vertex_buffer: -0.557000 +vertex_buffer: 4.010119 +vertex_buffer: 5.737586 +vertex_buffer: 0.295500 +vertex_buffer: 0.139600 +vertex_buffer: -0.884999 +vertex_buffer: 4.051117 +vertex_buffer: 5.738481 +vertex_buffer: 0.303600 +vertex_buffer: 0.168000 +vertex_buffer: -1.100000 +vertex_buffer: 4.085115 +vertex_buffer: 5.740980 +vertex_buffer: 0.308100 +vertex_buffer: 0.186900 +vertex_buffer: -1.408000 +vertex_buffer: 4.138110 +vertex_buffer: 5.742989 +vertex_buffer: 0.313100 +vertex_buffer: 0.214600 +vertex_buffer: -1.825000 +vertex_buffer: 4.212113 +vertex_buffer: 5.742072 +vertex_buffer: 0.317400 +vertex_buffer: 0.252500 +vertex_buffer: -2.364000 +vertex_buffer: 4.287131 +vertex_buffer: 5.725549 +vertex_buffer: 0.321000 +vertex_buffer: 0.301800 +vertex_buffer: -3.039000 +vertex_buffer: 4.343173 +vertex_buffer: 5.693288 +vertex_buffer: 
0.325800 +vertex_buffer: 0.363200 +vertex_buffer: -3.712000 +vertex_buffer: 4.374225 +vertex_buffer: 5.649136 +vertex_buffer: 0.332200 +vertex_buffer: 0.424300 +vertex_buffer: -4.241000 +vertex_buffer: 4.376288 +vertex_buffer: 5.597692 +vertex_buffer: 0.338400 +vertex_buffer: 0.472600 +vertex_buffer: -7.047000 +vertex_buffer: 3.937823 +vertex_buffer: 5.156792 +vertex_buffer: 0.410100 +vertex_buffer: 0.735500 +vertex_buffer: -6.661000 +vertex_buffer: 4.029736 +vertex_buffer: 5.228822 +vertex_buffer: 0.393600 +vertex_buffer: 0.695500 +vertex_buffer: -6.937000 +vertex_buffer: 2.624609 +vertex_buffer: 5.145157 +vertex_buffer: 0.573600 +vertex_buffer: 0.719000 +vertex_buffer: -0.549000 +vertex_buffer: 2.943340 +vertex_buffer: 5.723495 +vertex_buffer: 0.435100 +vertex_buffer: 0.038000 +vertex_buffer: -7.092000 +vertex_buffer: 3.924858 +vertex_buffer: 5.128843 +vertex_buffer: 0.412000 +vertex_buffer: 0.741800 +vertex_buffer: -7.111000 +vertex_buffer: 3.918920 +vertex_buffer: 5.078127 +vertex_buffer: 0.412900 +vertex_buffer: 0.748000 +vertex_buffer: -7.115000 +vertex_buffer: 3.949918 +vertex_buffer: 5.079048 +vertex_buffer: 0.415800 +vertex_buffer: 0.747800 +vertex_buffer: -7.113000 +vertex_buffer: 3.917983 +vertex_buffer: 5.025514 +vertex_buffer: 0.413300 +vertex_buffer: 0.753900 +vertex_buffer: -7.118000 +vertex_buffer: 3.948984 +vertex_buffer: 5.024453 +vertex_buffer: 0.416100 +vertex_buffer: 0.753900 +vertex_buffer: -7.349000 +vertex_buffer: 3.848097 +vertex_buffer: 4.931756 +vertex_buffer: 0.437800 +vertex_buffer: 0.777500 +vertex_buffer: -7.355000 +vertex_buffer: 3.828099 +vertex_buffer: 4.930316 +vertex_buffer: 0.442100 +vertex_buffer: 0.779000 +vertex_buffer: -7.351000 +vertex_buffer: 3.790099 +vertex_buffer: 4.929286 +vertex_buffer: 0.447600 +vertex_buffer: 0.780600 +vertex_buffer: -7.330000 +vertex_buffer: 3.384200 +vertex_buffer: 4.920932 +vertex_buffer: 0.493500 +vertex_buffer: 0.779900 +vertex_buffer: -7.307000 +vertex_buffer: 3.363702 +vertex_buffer: 4.924161 +vertex_buffer: 0.498000 +vertex_buffer: 0.779100 +vertex_buffer: -7.257000 +vertex_buffer: 3.334402 +vertex_buffer: 4.931817 +vertex_buffer: 0.504300 +vertex_buffer: 0.777500 +vertex_buffer: -7.344000 +vertex_buffer: 3.549107 +vertex_buffer: 4.923484 +vertex_buffer: 0.472400 +vertex_buffer: 0.782100 +vertex_buffer: -7.342000 +vertex_buffer: 3.491108 +vertex_buffer: 4.922235 +vertex_buffer: 0.479000 +vertex_buffer: 0.781900 +vertex_buffer: -7.137000 +vertex_buffer: 3.943048 +vertex_buffer: 4.972868 +vertex_buffer: 0.418000 +vertex_buffer: 0.760000 +vertex_buffer: -7.120000 +vertex_buffer: 3.945038 +vertex_buffer: 4.979739 +vertex_buffer: 0.416700 +vertex_buffer: 0.758700 +vertex_buffer: -7.116000 +vertex_buffer: 3.915038 +vertex_buffer: 4.979849 +vertex_buffer: 0.414600 +vertex_buffer: 0.760100 +vertex_buffer: -7.136000 +vertex_buffer: 3.910048 +vertex_buffer: 4.971928 +vertex_buffer: 0.416500 +vertex_buffer: 0.762500 +vertex_buffer: -7.167000 +vertex_buffer: 3.903056 +vertex_buffer: 4.966033 +vertex_buffer: 0.419600 +vertex_buffer: 0.764900 +vertex_buffer: -7.168000 +vertex_buffer: 3.936054 +vertex_buffer: 4.966967 +vertex_buffer: 0.420700 +vertex_buffer: 0.762000 +vertex_buffer: -7.340000 +vertex_buffer: 3.445111 +vertex_buffer: 4.921282 +vertex_buffer: 0.484500 +vertex_buffer: 0.781500 +vertex_buffer: -7.339000 +vertex_buffer: 3.410543 +vertex_buffer: 4.919605 +vertex_buffer: 0.489200 +vertex_buffer: 0.780700 +vertex_buffer: -7.081000 +vertex_buffer: 3.225890 +vertex_buffer: 4.958372 +vertex_buffer: 0.523200 +vertex_buffer: 
0.771200 +vertex_buffer: -7.329000 +vertex_buffer: 3.862093 +vertex_buffer: 4.935845 +vertex_buffer: 0.433300 +vertex_buffer: 0.775300 +vertex_buffer: -7.348000 +vertex_buffer: 3.655105 +vertex_buffer: 4.925934 +vertex_buffer: 0.461400 +vertex_buffer: 0.781700 +vertex_buffer: -7.266000 +vertex_buffer: 3.262676 +vertex_buffer: 5.181256 +vertex_buffer: 0.506400 +vertex_buffer: 0.749800 +vertex_buffer: -7.226000 +vertex_buffer: 3.234846 +vertex_buffer: 5.228929 +vertex_buffer: 0.510100 +vertex_buffer: 0.744300 +vertex_buffer: -7.182000 +vertex_buffer: 3.205980 +vertex_buffer: 5.273643 +vertex_buffer: 0.513500 +vertex_buffer: 0.738500 +vertex_buffer: -1.142000 +vertex_buffer: 1.341919 +vertex_buffer: 6.002557 +vertex_buffer: 0.575400 +vertex_buffer: 0.095400 +vertex_buffer: -1.118000 +vertex_buffer: 1.338903 +vertex_buffer: 6.002359 +vertex_buffer: 0.575400 +vertex_buffer: 0.093900 +vertex_buffer: -1.103000 +vertex_buffer: 1.337232 +vertex_buffer: 5.996234 +vertex_buffer: 0.575600 +vertex_buffer: 0.093100 +vertex_buffer: -0.806000 +vertex_buffer: 2.138176 +vertex_buffer: 5.548845 +vertex_buffer: 0.514500 +vertex_buffer: 0.036600 +vertex_buffer: -1.367000 +vertex_buffer: 1.388032 +vertex_buffer: 5.704676 +vertex_buffer: 0.568800 +vertex_buffer: 0.139600 +vertex_buffer: -1.098000 +vertex_buffer: 1.337325 +vertex_buffer: 5.977184 +vertex_buffer: 0.576200 +vertex_buffer: 0.092200 +vertex_buffer: -1.332000 +vertex_buffer: 1.365340 +vertex_buffer: 5.998475 +vertex_buffer: 0.575300 +vertex_buffer: 0.112000 +vertex_buffer: -1.071000 +vertex_buffer: 1.551685 +vertex_buffer: 5.483383 +vertex_buffer: 0.577000 +vertex_buffer: 0.044800 +vertex_buffer: -7.168000 +vertex_buffer: 3.279676 +vertex_buffer: 4.945478 +vertex_buffer: 0.514000 +vertex_buffer: 0.774200 +vertex_buffer: -7.141000 +vertex_buffer: 3.958912 +vertex_buffer: 5.084465 +vertex_buffer: 0.418600 +vertex_buffer: 0.748300 +vertex_buffer: -7.142000 +vertex_buffer: 3.957986 +vertex_buffer: 5.022904 +vertex_buffer: 0.418500 +vertex_buffer: 0.754600 +vertex_buffer: -7.112000 +vertex_buffer: 3.917019 +vertex_buffer: 4.995733 +vertex_buffer: 0.413700 +vertex_buffer: 0.757700 +vertex_buffer: -7.116000 +vertex_buffer: 3.949022 +vertex_buffer: 4.993701 +vertex_buffer: 0.416400 +vertex_buffer: 0.757200 +vertex_buffer: -7.141000 +vertex_buffer: 3.958028 +vertex_buffer: 4.989184 +vertex_buffer: 0.418500 +vertex_buffer: 0.758000 +vertex_buffer: -0.573225 +vertex_buffer: 3.334350 +vertex_buffer: 6.083103 +vertex_buffer: 0.387400 +vertex_buffer: 0.102200 +vertex_buffer: -0.323866 +vertex_buffer: 3.364760 +vertex_buffer: 6.099101 +vertex_buffer: 0.376200 +vertex_buffer: 0.082600 +vertex_buffer: -0.167873 +vertex_buffer: 3.366989 +vertex_buffer: 6.104354 +vertex_buffer: 0.368800 +vertex_buffer: 0.070800 +vertex_buffer: 0.317000 +vertex_buffer: 4.024064 +vertex_buffer: 5.899689 +vertex_buffer: 0.113300 +vertex_buffer: 0.743500 +vertex_buffer: 0.317000 +vertex_buffer: 4.026103 +vertex_buffer: 5.867753 +vertex_buffer: 0.115200 +vertex_buffer: 0.743500 +vertex_buffer: -0.000000 +vertex_buffer: 4.013804 +vertex_buffer: 5.874567 +vertex_buffer: 0.115200 +vertex_buffer: 0.764900 +vertex_buffer: -0.000000 +vertex_buffer: 4.010763 +vertex_buffer: 5.907621 +vertex_buffer: 0.113300 +vertex_buffer: 0.764900 +vertex_buffer: 0.317000 +vertex_buffer: 4.024140 +vertex_buffer: 5.836836 +vertex_buffer: 0.117100 +vertex_buffer: 0.743500 +vertex_buffer: -0.000000 +vertex_buffer: 4.010841 +vertex_buffer: 5.842498 +vertex_buffer: 0.117100 +vertex_buffer: 0.764900 +vertex_buffer: 
1.083000 +vertex_buffer: 4.115381 +vertex_buffer: 5.880229 +vertex_buffer: 0.113300 +vertex_buffer: 0.693300 +vertex_buffer: 1.083000 +vertex_buffer: 4.118413 +vertex_buffer: 5.852974 +vertex_buffer: 0.115200 +vertex_buffer: 0.693300 +vertex_buffer: 0.670000 +vertex_buffer: 4.054759 +vertex_buffer: 5.859921 +vertex_buffer: 0.115200 +vertex_buffer: 0.720700 +vertex_buffer: 0.670000 +vertex_buffer: 4.051723 +vertex_buffer: 5.889535 +vertex_buffer: 0.113300 +vertex_buffer: 0.720700 +vertex_buffer: 1.082000 +vertex_buffer: 4.115445 +vertex_buffer: 5.826654 +vertex_buffer: 0.117100 +vertex_buffer: 0.693300 +vertex_buffer: 0.669000 +vertex_buffer: 4.051794 +vertex_buffer: 5.831267 +vertex_buffer: 0.117100 +vertex_buffer: 0.720700 +vertex_buffer: 1.465000 +vertex_buffer: 4.182078 +vertex_buffer: 5.872258 +vertex_buffer: 0.113300 +vertex_buffer: 0.667700 +vertex_buffer: 1.464000 +vertex_buffer: 4.185110 +vertex_buffer: 5.847359 +vertex_buffer: 0.115200 +vertex_buffer: 0.667700 +vertex_buffer: 1.464000 +vertex_buffer: 4.182139 +vertex_buffer: 5.822321 +vertex_buffer: 0.117100 +vertex_buffer: 0.667700 +vertex_buffer: 1.843000 +vertex_buffer: 4.247624 +vertex_buffer: 5.863575 +vertex_buffer: 0.113300 +vertex_buffer: 0.642000 +vertex_buffer: 1.842000 +vertex_buffer: 4.250652 +vertex_buffer: 5.839960 +vertex_buffer: 0.115200 +vertex_buffer: 0.642000 +vertex_buffer: 1.841000 +vertex_buffer: 4.247682 +vertex_buffer: 5.816204 +vertex_buffer: 0.117100 +vertex_buffer: 0.642000 +vertex_buffer: 2.366000 +vertex_buffer: 4.322475 +vertex_buffer: 5.821735 +vertex_buffer: 0.115200 +vertex_buffer: 0.606500 +vertex_buffer: 2.364000 +vertex_buffer: 4.319502 +vertex_buffer: 5.798289 +vertex_buffer: 0.117100 +vertex_buffer: 0.606500 +vertex_buffer: 2.367000 +vertex_buffer: 4.319444 +vertex_buffer: 5.846098 +vertex_buffer: 0.113300 +vertex_buffer: 0.606500 +vertex_buffer: 3.039000 +vertex_buffer: 4.372745 +vertex_buffer: 5.811448 +vertex_buffer: 0.113300 +vertex_buffer: 0.561200 +vertex_buffer: 3.038000 +vertex_buffer: 4.375773 +vertex_buffer: 5.788562 +vertex_buffer: 0.115200 +vertex_buffer: 0.561200 +vertex_buffer: 3.036000 +vertex_buffer: 4.372801 +vertex_buffer: 5.765585 +vertex_buffer: 0.117100 +vertex_buffer: 0.561200 +vertex_buffer: 3.716000 +vertex_buffer: 4.406310 +vertex_buffer: 5.744192 +vertex_buffer: 0.115200 +vertex_buffer: 0.515600 +vertex_buffer: 3.714000 +vertex_buffer: 4.403336 +vertex_buffer: 5.721646 +vertex_buffer: 0.117100 +vertex_buffer: 0.515600 +vertex_buffer: 3.718000 +vertex_buffer: 4.403282 +vertex_buffer: 5.765681 +vertex_buffer: 0.113300 +vertex_buffer: 0.515600 +vertex_buffer: 4.259000 +vertex_buffer: 4.404346 +vertex_buffer: 5.714486 +vertex_buffer: 0.113300 +vertex_buffer: 0.479100 +vertex_buffer: 4.257000 +vertex_buffer: 4.407372 +vertex_buffer: 5.693258 +vertex_buffer: 0.115200 +vertex_buffer: 0.479100 +vertex_buffer: 4.254000 +vertex_buffer: 4.404398 +vertex_buffer: 5.671983 +vertex_buffer: 0.117100 +vertex_buffer: 0.479100 +vertex_buffer: 4.709000 +vertex_buffer: 4.387179 +vertex_buffer: 5.639311 +vertex_buffer: 0.115200 +vertex_buffer: 0.448800 +vertex_buffer: 4.706000 +vertex_buffer: 4.384205 +vertex_buffer: 5.617180 +vertex_buffer: 0.117100 +vertex_buffer: 0.448800 +vertex_buffer: 4.711000 +vertex_buffer: 4.384153 +vertex_buffer: 5.660390 +vertex_buffer: 0.113300 +vertex_buffer: 0.448800 +vertex_buffer: 5.122000 +vertex_buffer: 4.352591 +vertex_buffer: 5.582086 +vertex_buffer: 0.115200 +vertex_buffer: 0.421000 +vertex_buffer: 5.125000 +vertex_buffer: 4.349563 +vertex_buffer: 
5.603030 +vertex_buffer: 0.113300 +vertex_buffer: 0.421000 +vertex_buffer: 5.119000 +vertex_buffer: 4.349615 +vertex_buffer: 5.561021 +vertex_buffer: 0.117100 +vertex_buffer: 0.421000 +vertex_buffer: 5.542000 +vertex_buffer: 4.293632 +vertex_buffer: 5.516748 +vertex_buffer: 0.115200 +vertex_buffer: 0.392600 +vertex_buffer: 5.539000 +vertex_buffer: 4.291658 +vertex_buffer: 5.495723 +vertex_buffer: 0.117100 +vertex_buffer: 0.392600 +vertex_buffer: 5.545000 +vertex_buffer: 4.290609 +vertex_buffer: 5.537658 +vertex_buffer: 0.113300 +vertex_buffer: 0.392600 +vertex_buffer: 6.018000 +vertex_buffer: 4.203568 +vertex_buffer: 5.459014 +vertex_buffer: 0.113300 +vertex_buffer: 0.360400 +vertex_buffer: 6.015000 +vertex_buffer: 4.205595 +vertex_buffer: 5.438093 +vertex_buffer: 0.115200 +vertex_buffer: 0.360400 +vertex_buffer: 6.012000 +vertex_buffer: 4.203619 +vertex_buffer: 5.417067 +vertex_buffer: 0.117100 +vertex_buffer: 0.360400 +vertex_buffer: 6.398000 +vertex_buffer: 4.123437 +vertex_buffer: 5.353050 +vertex_buffer: 0.117100 +vertex_buffer: 0.333900 +vertex_buffer: 6.402000 +vertex_buffer: 4.126411 +vertex_buffer: 5.374100 +vertex_buffer: 0.115200 +vertex_buffer: 0.333900 +vertex_buffer: 6.405000 +vertex_buffer: 4.123386 +vertex_buffer: 5.394981 +vertex_buffer: 0.113300 +vertex_buffer: 0.333900 +vertex_buffer: 0.317000 +vertex_buffer: 4.016156 +vertex_buffer: 5.825611 +vertex_buffer: 0.117800 +vertex_buffer: 0.743500 +vertex_buffer: -0.000000 +vertex_buffer: 4.002857 +vertex_buffer: 5.831279 +vertex_buffer: 0.117800 +vertex_buffer: 0.764900 +vertex_buffer: -0.000000 +vertex_buffer: 3.981855 +vertex_buffer: 5.830786 +vertex_buffer: 0.119200 +vertex_buffer: 0.764900 +vertex_buffer: 0.318000 +vertex_buffer: 3.995154 +vertex_buffer: 5.825060 +vertex_buffer: 0.119200 +vertex_buffer: 0.743500 +vertex_buffer: 0.317000 +vertex_buffer: 4.016054 +vertex_buffer: 5.909416 +vertex_buffer: 0.112500 +vertex_buffer: 0.743500 +vertex_buffer: -0.000000 +vertex_buffer: 4.002751 +vertex_buffer: 5.918476 +vertex_buffer: 0.112500 +vertex_buffer: 0.764900 +vertex_buffer: -0.000000 +vertex_buffer: 3.981750 +vertex_buffer: 5.917982 +vertex_buffer: 0.111100 +vertex_buffer: 0.764900 +vertex_buffer: 0.318999 +vertex_buffer: 3.995053 +vertex_buffer: 5.908851 +vertex_buffer: 0.111100 +vertex_buffer: 0.743500 +vertex_buffer: 1.082000 +vertex_buffer: 4.107457 +vertex_buffer: 5.816597 +vertex_buffer: 0.117800 +vertex_buffer: 0.693300 +vertex_buffer: 0.669000 +vertex_buffer: 4.043805 +vertex_buffer: 5.821159 +vertex_buffer: 0.117800 +vertex_buffer: 0.720700 +vertex_buffer: 1.082000 +vertex_buffer: 4.086458 +vertex_buffer: 5.816010 +vertex_buffer: 0.119200 +vertex_buffer: 0.693300 +vertex_buffer: 0.669000 +vertex_buffer: 4.022807 +vertex_buffer: 5.820588 +vertex_buffer: 0.119200 +vertex_buffer: 0.720700 +vertex_buffer: 1.083000 +vertex_buffer: 4.107370 +vertex_buffer: 5.888755 +vertex_buffer: 0.112500 +vertex_buffer: 0.693300 +vertex_buffer: 0.670000 +vertex_buffer: 4.043711 +vertex_buffer: 5.899221 +vertex_buffer: 0.112500 +vertex_buffer: 0.720700 +vertex_buffer: 1.083000 +vertex_buffer: 4.086370 +vertex_buffer: 5.888166 +vertex_buffer: 0.111100 +vertex_buffer: 0.693300 +vertex_buffer: 0.670000 +vertex_buffer: 4.022712 +vertex_buffer: 5.898648 +vertex_buffer: 0.111100 +vertex_buffer: 0.720700 +vertex_buffer: 1.463000 +vertex_buffer: 4.174151 +vertex_buffer: 5.813412 +vertex_buffer: 0.117800 +vertex_buffer: 0.667700 +vertex_buffer: 1.463000 +vertex_buffer: 4.153151 +vertex_buffer: 5.812834 +vertex_buffer: 0.119200 +vertex_buffer: 
0.667700 +vertex_buffer: 1.465000 +vertex_buffer: 4.174070 +vertex_buffer: 5.879640 +vertex_buffer: 0.112500 +vertex_buffer: 0.667700 +vertex_buffer: 1.465000 +vertex_buffer: 4.153070 +vertex_buffer: 5.879068 +vertex_buffer: 0.111100 +vertex_buffer: 0.667700 +vertex_buffer: 2.364000 +vertex_buffer: 4.311512 +vertex_buffer: 5.790699 +vertex_buffer: 0.117800 +vertex_buffer: 0.606500 +vertex_buffer: 1.841000 +vertex_buffer: 4.239691 +vertex_buffer: 5.807399 +vertex_buffer: 0.117800 +vertex_buffer: 0.642000 +vertex_buffer: 1.840000 +vertex_buffer: 4.218691 +vertex_buffer: 5.806841 +vertex_buffer: 0.119200 +vertex_buffer: 0.642000 +vertex_buffer: 2.363000 +vertex_buffer: 4.291511 +vertex_buffer: 5.790249 +vertex_buffer: 0.119200 +vertex_buffer: 0.606500 +vertex_buffer: 1.843000 +vertex_buffer: 4.239612 +vertex_buffer: 5.871983 +vertex_buffer: 0.112500 +vertex_buffer: 0.642000 +vertex_buffer: 2.367000 +vertex_buffer: 4.311437 +vertex_buffer: 5.853360 +vertex_buffer: 0.112500 +vertex_buffer: 0.606500 +vertex_buffer: 2.367000 +vertex_buffer: 4.291435 +vertex_buffer: 5.852943 +vertex_buffer: 0.111100 +vertex_buffer: 0.606500 +vertex_buffer: 1.843000 +vertex_buffer: 4.218612 +vertex_buffer: 5.871453 +vertex_buffer: 0.111100 +vertex_buffer: 0.642000 +vertex_buffer: 3.713000 +vertex_buffer: 4.395346 +vertex_buffer: 5.714444 +vertex_buffer: 0.117800 +vertex_buffer: 0.515600 +vertex_buffer: 3.035000 +vertex_buffer: 4.364810 +vertex_buffer: 5.758161 +vertex_buffer: 0.117800 +vertex_buffer: 0.561200 +vertex_buffer: 3.035000 +vertex_buffer: 4.344810 +vertex_buffer: 5.757899 +vertex_buffer: 0.119200 +vertex_buffer: 0.561200 +vertex_buffer: 3.713000 +vertex_buffer: 4.375346 +vertex_buffer: 5.714359 +vertex_buffer: 0.119200 +vertex_buffer: 0.515600 +vertex_buffer: 3.040000 +vertex_buffer: 4.364735 +vertex_buffer: 5.819693 +vertex_buffer: 0.112500 +vertex_buffer: 0.561200 +vertex_buffer: 3.718000 +vertex_buffer: 4.395276 +vertex_buffer: 5.772814 +vertex_buffer: 0.112500 +vertex_buffer: 0.515600 +vertex_buffer: 3.039000 +vertex_buffer: 4.344735 +vertex_buffer: 5.819412 +vertex_buffer: 0.111100 +vertex_buffer: 0.561200 +vertex_buffer: 3.718000 +vertex_buffer: 4.375276 +vertex_buffer: 5.772729 +vertex_buffer: 0.111100 +vertex_buffer: 0.515600 +vertex_buffer: 4.705000 +vertex_buffer: 4.376215 +vertex_buffer: 5.610039 +vertex_buffer: 0.117800 +vertex_buffer: 0.448800 +vertex_buffer: 4.253000 +vertex_buffer: 4.396406 +vertex_buffer: 5.664846 +vertex_buffer: 0.117800 +vertex_buffer: 0.479100 +vertex_buffer: 4.253000 +vertex_buffer: 4.375406 +vertex_buffer: 5.664687 +vertex_buffer: 0.119200 +vertex_buffer: 0.479100 +vertex_buffer: 4.705000 +vertex_buffer: 4.355214 +vertex_buffer: 5.609741 +vertex_buffer: 0.119200 +vertex_buffer: 0.448800 +vertex_buffer: 4.259000 +vertex_buffer: 4.396337 +vertex_buffer: 5.721516 +vertex_buffer: 0.112500 +vertex_buffer: 0.479100 +vertex_buffer: 4.712000 +vertex_buffer: 4.376145 +vertex_buffer: 5.667303 +vertex_buffer: 0.112500 +vertex_buffer: 0.448800 +vertex_buffer: 4.259000 +vertex_buffer: 4.375338 +vertex_buffer: 5.721359 +vertex_buffer: 0.111100 +vertex_buffer: 0.479100 +vertex_buffer: 4.711000 +vertex_buffer: 4.355145 +vertex_buffer: 5.667006 +vertex_buffer: 0.111100 +vertex_buffer: 0.448800 +vertex_buffer: 5.118000 +vertex_buffer: 4.341623 +vertex_buffer: 5.553867 +vertex_buffer: 0.117800 +vertex_buffer: 0.421000 +vertex_buffer: 5.538000 +vertex_buffer: 4.282668 +vertex_buffer: 5.488510 +vertex_buffer: 0.117800 +vertex_buffer: 0.392600 +vertex_buffer: 5.118000 +vertex_buffer: 
4.320625 +vertex_buffer: 5.553451 +vertex_buffer: 0.119200 +vertex_buffer: 0.421000 +vertex_buffer: 5.537000 +vertex_buffer: 4.262668 +vertex_buffer: 5.488038 +vertex_buffer: 0.119200 +vertex_buffer: 0.392600 +vertex_buffer: 5.125000 +vertex_buffer: 4.341555 +vertex_buffer: 5.609896 +vertex_buffer: 0.112500 +vertex_buffer: 0.421000 +vertex_buffer: 5.546000 +vertex_buffer: 4.282600 +vertex_buffer: 5.544456 +vertex_buffer: 0.112500 +vertex_buffer: 0.392600 +vertex_buffer: 5.125000 +vertex_buffer: 4.320557 +vertex_buffer: 5.609480 +vertex_buffer: 0.111100 +vertex_buffer: 0.421000 +vertex_buffer: 5.545000 +vertex_buffer: 4.262599 +vertex_buffer: 5.544984 +vertex_buffer: 0.111100 +vertex_buffer: 0.392600 +vertex_buffer: 6.011000 +vertex_buffer: 4.195629 +vertex_buffer: 5.409849 +vertex_buffer: 0.117800 +vertex_buffer: 0.360400 +vertex_buffer: 6.397000 +vertex_buffer: 4.115445 +vertex_buffer: 5.345820 +vertex_buffer: 0.117800 +vertex_buffer: 0.333900 +vertex_buffer: 6.010000 +vertex_buffer: 4.174628 +vertex_buffer: 5.409264 +vertex_buffer: 0.119200 +vertex_buffer: 0.360400 +vertex_buffer: 6.396000 +vertex_buffer: 4.094446 +vertex_buffer: 5.345189 +vertex_buffer: 0.119200 +vertex_buffer: 0.333900 +vertex_buffer: 6.019000 +vertex_buffer: 4.194561 +vertex_buffer: 5.465749 +vertex_buffer: 0.112500 +vertex_buffer: 0.360400 +vertex_buffer: 6.406000 +vertex_buffer: 4.115377 +vertex_buffer: 5.401730 +vertex_buffer: 0.112500 +vertex_buffer: 0.333900 +vertex_buffer: 6.018000 +vertex_buffer: 4.174561 +vertex_buffer: 5.465191 +vertex_buffer: 0.111100 +vertex_buffer: 0.360400 +vertex_buffer: 6.405000 +vertex_buffer: 4.094377 +vertex_buffer: 5.401098 +vertex_buffer: 0.111100 +vertex_buffer: 0.333900 +vertex_buffer: 6.663430 +vertex_buffer: 4.069083 +vertex_buffer: 5.334795 +vertex_buffer: 0.115200 +vertex_buffer: 0.318300 +vertex_buffer: 6.667440 +vertex_buffer: 4.066058 +vertex_buffer: 5.355673 +vertex_buffer: 0.113300 +vertex_buffer: 0.318300 +vertex_buffer: 6.660440 +vertex_buffer: 4.066109 +vertex_buffer: 5.313739 +vertex_buffer: 0.117100 +vertex_buffer: 0.318300 +vertex_buffer: 6.658440 +vertex_buffer: 4.058116 +vertex_buffer: 5.306510 +vertex_buffer: 0.117800 +vertex_buffer: 0.318300 +vertex_buffer: 6.657430 +vertex_buffer: 4.038118 +vertex_buffer: 5.305898 +vertex_buffer: 0.119200 +vertex_buffer: 0.318300 +vertex_buffer: 6.402000 +vertex_buffer: 4.002382 +vertex_buffer: 5.398364 +vertex_buffer: 0.104900 +vertex_buffer: 0.333900 +vertex_buffer: 6.663430 +vertex_buffer: 3.946052 +vertex_buffer: 5.360036 +vertex_buffer: 0.104900 +vertex_buffer: 0.318300 +vertex_buffer: 6.667440 +vertex_buffer: 4.037049 +vertex_buffer: 5.361777 +vertex_buffer: 0.111100 +vertex_buffer: 0.318300 +vertex_buffer: 6.683440 +vertex_buffer: 4.032087 +vertex_buffer: 5.330598 +vertex_buffer: 0.115200 +vertex_buffer: 0.315300 +vertex_buffer: 6.682430 +vertex_buffer: 4.033060 +vertex_buffer: 5.352612 +vertex_buffer: 0.113300 +vertex_buffer: 0.315300 +vertex_buffer: 6.678430 +vertex_buffer: 3.942064 +vertex_buffer: 5.349873 +vertex_buffer: 0.113300 +vertex_buffer: 0.309200 +vertex_buffer: 6.680430 +vertex_buffer: 3.940090 +vertex_buffer: 5.327828 +vertex_buffer: 0.115200 +vertex_buffer: 0.309200 +vertex_buffer: 6.671430 +vertex_buffer: 3.942115 +vertex_buffer: 5.307944 +vertex_buffer: 0.117100 +vertex_buffer: 0.309200 +vertex_buffer: 6.675430 +vertex_buffer: 4.034112 +vertex_buffer: 5.310711 +vertex_buffer: 0.117100 +vertex_buffer: 0.315300 +vertex_buffer: -0.000000 +vertex_buffer: 3.889860 +vertex_buffer: 5.828691 +vertex_buffer: 
0.125300 +vertex_buffer: 0.764900 +vertex_buffer: 0.323999 +vertex_buffer: 3.891866 +vertex_buffer: 5.822424 +vertex_buffer: 0.125300 +vertex_buffer: 0.743500 +vertex_buffer: -0.000000 +vertex_buffer: 3.889754 +vertex_buffer: 5.915883 +vertex_buffer: 0.104900 +vertex_buffer: 0.764900 +vertex_buffer: 0.327999 +vertex_buffer: 3.891764 +vertex_buffer: 5.906166 +vertex_buffer: 0.104900 +vertex_buffer: 0.743500 +vertex_buffer: 1.081000 +vertex_buffer: 3.967878 +vertex_buffer: 5.812775 +vertex_buffer: 0.125300 +vertex_buffer: 0.693300 +vertex_buffer: 0.669000 +vertex_buffer: 3.913872 +vertex_buffer: 5.817708 +vertex_buffer: 0.125300 +vertex_buffer: 0.720700 +vertex_buffer: 0.672999 +vertex_buffer: 3.913777 +vertex_buffer: 5.895747 +vertex_buffer: 0.104900 +vertex_buffer: 0.720700 +vertex_buffer: 1.083000 +vertex_buffer: 3.967790 +vertex_buffer: 5.884938 +vertex_buffer: 0.104900 +vertex_buffer: 0.693300 +vertex_buffer: 1.462000 +vertex_buffer: 4.038882 +vertex_buffer: 5.809780 +vertex_buffer: 0.125300 +vertex_buffer: 0.667700 +vertex_buffer: 1.464000 +vertex_buffer: 4.038802 +vertex_buffer: 5.876020 +vertex_buffer: 0.104900 +vertex_buffer: 0.667700 +vertex_buffer: 1.840000 +vertex_buffer: 4.110889 +vertex_buffer: 5.804165 +vertex_buffer: 0.125300 +vertex_buffer: 0.642000 +vertex_buffer: 2.362000 +vertex_buffer: 4.186910 +vertex_buffer: 5.788058 +vertex_buffer: 0.125300 +vertex_buffer: 0.606500 +vertex_buffer: 1.842000 +vertex_buffer: 4.110811 +vertex_buffer: 5.868769 +vertex_buffer: 0.104900 +vertex_buffer: 0.642000 +vertex_buffer: 2.366000 +vertex_buffer: 4.186833 +vertex_buffer: 5.850768 +vertex_buffer: 0.104900 +vertex_buffer: 0.606500 +vertex_buffer: 3.034000 +vertex_buffer: 4.240947 +vertex_buffer: 5.756465 +vertex_buffer: 0.125300 +vertex_buffer: 0.561200 +vertex_buffer: 3.711000 +vertex_buffer: 4.269999 +vertex_buffer: 5.713801 +vertex_buffer: 0.125300 +vertex_buffer: 0.515600 +vertex_buffer: 3.038000 +vertex_buffer: 4.240872 +vertex_buffer: 5.817998 +vertex_buffer: 0.104900 +vertex_buffer: 0.561200 +vertex_buffer: 3.716000 +vertex_buffer: 4.269928 +vertex_buffer: 5.772200 +vertex_buffer: 0.104900 +vertex_buffer: 0.515600 +vertex_buffer: 4.251000 +vertex_buffer: 4.271059 +vertex_buffer: 5.663842 +vertex_buffer: 0.125300 +vertex_buffer: 0.479100 +vertex_buffer: 4.702000 +vertex_buffer: 4.250126 +vertex_buffer: 5.608262 +vertex_buffer: 0.125300 +vertex_buffer: 0.448800 +vertex_buffer: 4.257000 +vertex_buffer: 4.270990 +vertex_buffer: 5.720535 +vertex_buffer: 0.104900 +vertex_buffer: 0.479100 +vertex_buffer: 4.709000 +vertex_buffer: 4.249059 +vertex_buffer: 5.665525 +vertex_buffer: 0.104900 +vertex_buffer: 0.448800 +vertex_buffer: 5.115000 +vertex_buffer: 4.213196 +vertex_buffer: 5.551396 +vertex_buffer: 0.125300 +vertex_buffer: 0.421000 +vertex_buffer: 5.534000 +vertex_buffer: 4.156276 +vertex_buffer: 5.485503 +vertex_buffer: 0.125300 +vertex_buffer: 0.392600 +vertex_buffer: 5.122000 +vertex_buffer: 4.213128 +vertex_buffer: 5.607423 +vertex_buffer: 0.104900 +vertex_buffer: 0.421000 +vertex_buffer: 5.542000 +vertex_buffer: 4.156207 +vertex_buffer: 5.542446 +vertex_buffer: 0.104900 +vertex_buffer: 0.392600 +vertex_buffer: 6.007000 +vertex_buffer: 4.075372 +vertex_buffer: 5.406505 +vertex_buffer: 0.125300 +vertex_buffer: 0.360400 +vertex_buffer: 6.393000 +vertex_buffer: 4.002450 +vertex_buffer: 5.342460 +vertex_buffer: 0.125300 +vertex_buffer: 0.333900 +vertex_buffer: 6.015000 +vertex_buffer: 4.075303 +vertex_buffer: 5.463432 +vertex_buffer: 0.104900 +vertex_buffer: 0.360400 +vertex_buffer: 
6.654430 +vertex_buffer: 3.946120 +vertex_buffer: 5.304128 +vertex_buffer: 0.125300 +vertex_buffer: 0.318300 +vertex_buffer: 6.667440 +vertex_buffer: 4.058049 +vertex_buffer: 5.362424 +vertex_buffer: 0.112500 +vertex_buffer: 0.318300 +vertex_buffer: 6.682430 +vertex_buffer: 4.054059 +vertex_buffer: 5.353259 +vertex_buffer: 0.113300 +vertex_buffer: 0.316700 +vertex_buffer: 6.684430 +vertex_buffer: 4.053086 +vertex_buffer: 5.331241 +vertex_buffer: 0.115200 +vertex_buffer: 0.316700 +vertex_buffer: 6.679430 +vertex_buffer: 4.062085 +vertex_buffer: 5.332533 +vertex_buffer: 0.115200 +vertex_buffer: 0.317300 +vertex_buffer: 6.677440 +vertex_buffer: 4.061067 +vertex_buffer: 5.348493 +vertex_buffer: 0.113300 +vertex_buffer: 0.317300 +vertex_buffer: 6.675430 +vertex_buffer: 4.054110 +vertex_buffer: 5.311325 +vertex_buffer: 0.117100 +vertex_buffer: 0.316700 +vertex_buffer: 6.672430 +vertex_buffer: 4.061104 +vertex_buffer: 5.317541 +vertex_buffer: 0.117100 +vertex_buffer: 0.317300 +vertex_buffer: 6.675430 +vertex_buffer: 4.054110 +vertex_buffer: 5.311325 +vertex_buffer: 0.117800 +vertex_buffer: 0.317300 +vertex_buffer: 6.675430 +vertex_buffer: 4.034112 +vertex_buffer: 5.310711 +vertex_buffer: 0.119200 +vertex_buffer: 0.317300 +vertex_buffer: 6.678430 +vertex_buffer: 3.942064 +vertex_buffer: 5.349873 +vertex_buffer: 0.104900 +vertex_buffer: 0.317300 +vertex_buffer: 6.682430 +vertex_buffer: 4.033060 +vertex_buffer: 5.352612 +vertex_buffer: 0.111100 +vertex_buffer: 0.317300 +vertex_buffer: 6.671430 +vertex_buffer: 3.942115 +vertex_buffer: 5.307944 +vertex_buffer: 0.125300 +vertex_buffer: 0.317300 +vertex_buffer: 6.682430 +vertex_buffer: 4.054059 +vertex_buffer: 5.353259 +vertex_buffer: 0.112500 +vertex_buffer: 0.317300 +vertex_buffer: 7.965990 +vertex_buffer: 3.860461 +vertex_buffer: -1.788585 +vertex_buffer: 0.203100 +vertex_buffer: 0.507000 +vertex_buffer: 8.052990 +vertex_buffer: 3.796094 +vertex_buffer: -3.073513 +vertex_buffer: 0.198000 +vertex_buffer: 0.592300 +vertex_buffer: 8.034990 +vertex_buffer: 3.803094 +vertex_buffer: -3.073276 +vertex_buffer: 0.196600 +vertex_buffer: 0.592000 +vertex_buffer: 7.945990 +vertex_buffer: 3.868422 +vertex_buffer: -1.787321 +vertex_buffer: 0.201700 +vertex_buffer: 0.506700 +vertex_buffer: 7.874000 +vertex_buffer: 3.893830 +vertex_buffer: -0.680814 +vertex_buffer: 0.205900 +vertex_buffer: 0.433200 +vertex_buffer: 7.896000 +vertex_buffer: 3.885868 +vertex_buffer: -0.682088 +vertex_buffer: 0.207400 +vertex_buffer: 0.433600 +vertex_buffer: 7.380990 +vertex_buffer: 3.877836 +vertex_buffer: 4.914070 +vertex_buffer: 0.222500 +vertex_buffer: 0.055900 +vertex_buffer: 7.371990 +vertex_buffer: 3.872185 +vertex_buffer: 4.929768 +vertex_buffer: 0.222400 +vertex_buffer: 0.054500 +vertex_buffer: 7.379990 +vertex_buffer: 3.852270 +vertex_buffer: 4.933774 +vertex_buffer: 0.224200 +vertex_buffer: 0.054600 +vertex_buffer: 7.391990 +vertex_buffer: 3.851786 +vertex_buffer: 4.912165 +vertex_buffer: 0.224200 +vertex_buffer: 0.056300 +vertex_buffer: 7.394990 +vertex_buffer: 3.848810 +vertex_buffer: 4.868700 +vertex_buffer: 0.224100 +vertex_buffer: 0.059300 +vertex_buffer: 7.383990 +vertex_buffer: 3.875837 +vertex_buffer: 4.869610 +vertex_buffer: 0.222300 +vertex_buffer: 0.059000 +vertex_buffer: 7.391990 +vertex_buffer: 3.778765 +vertex_buffer: 4.912025 +vertex_buffer: 0.229100 +vertex_buffer: 0.056900 +vertex_buffer: 7.394990 +vertex_buffer: 3.776789 +vertex_buffer: 4.868576 +vertex_buffer: 0.228900 +vertex_buffer: 0.059900 +vertex_buffer: 7.379990 +vertex_buffer: 3.780249 
+vertex_buffer: 4.933623 +vertex_buffer: 0.229100 +vertex_buffer: 0.055200 +vertex_buffer: 7.934000 +vertex_buffer: 2.768480 +vertex_buffer: -7.441971 +vertex_buffer: 0.176600 +vertex_buffer: 0.898900 +vertex_buffer: 7.925000 +vertex_buffer: 2.694416 +vertex_buffer: -7.491223 +vertex_buffer: 0.182100 +vertex_buffer: 0.905500 +vertex_buffer: 7.887990 +vertex_buffer: 2.644913 +vertex_buffer: -7.592765 +vertex_buffer: 0.183300 +vertex_buffer: 0.914400 +vertex_buffer: 7.904990 +vertex_buffer: 2.747339 +vertex_buffer: -7.532923 +vertex_buffer: 0.173800 +vertex_buffer: 0.905600 +vertex_buffer: 7.934000 +vertex_buffer: 2.598799 +vertex_buffer: -7.395112 +vertex_buffer: 0.194600 +vertex_buffer: 0.904500 +vertex_buffer: 7.897000 +vertex_buffer: 2.545880 +vertex_buffer: -7.491230 +vertex_buffer: 0.196300 +vertex_buffer: 0.913300 +vertex_buffer: 7.831990 +vertex_buffer: 3.860223 +vertex_buffer: -1.781595 +vertex_buffer: 0.194100 +vertex_buffer: 0.505600 +vertex_buffer: 7.933000 +vertex_buffer: 3.796029 +vertex_buffer: -3.071515 +vertex_buffer: 0.189600 +vertex_buffer: 0.591400 +vertex_buffer: 7.928990 +vertex_buffer: 3.775072 +vertex_buffer: -3.073229 +vertex_buffer: 0.188100 +vertex_buffer: 0.591800 +vertex_buffer: 7.828000 +vertex_buffer: 3.836231 +vertex_buffer: -1.782412 +vertex_buffer: 0.192400 +vertex_buffer: 0.505900 +vertex_buffer: 7.747990 +vertex_buffer: 3.885458 +vertex_buffer: -0.670096 +vertex_buffer: 0.197600 +vertex_buffer: 0.431600 +vertex_buffer: 7.742000 +vertex_buffer: 3.858442 +vertex_buffer: -0.670008 +vertex_buffer: 0.195700 +vertex_buffer: 0.431800 +vertex_buffer: 7.828000 +vertex_buffer: 3.775273 +vertex_buffer: -1.784482 +vertex_buffer: 0.188400 +vertex_buffer: 0.506300 +vertex_buffer: 7.742000 +vertex_buffer: 3.787447 +vertex_buffer: -0.671427 +vertex_buffer: 0.191000 +vertex_buffer: 0.432000 +vertex_buffer: 7.928990 +vertex_buffer: 3.721103 +vertex_buffer: -3.075061 +vertex_buffer: 0.184400 +vertex_buffer: 0.592400 +vertex_buffer: 7.191990 +vertex_buffer: 3.879943 +vertex_buffer: 4.963490 +vertex_buffer: 0.210400 +vertex_buffer: 0.050900 +vertex_buffer: 7.175000 +vertex_buffer: 3.872894 +vertex_buffer: 4.961334 +vertex_buffer: 0.209100 +vertex_buffer: 0.050600 +vertex_buffer: 7.168990 +vertex_buffer: 3.853018 +vertex_buffer: 4.967149 +vertex_buffer: 0.209200 +vertex_buffer: 0.048900 +vertex_buffer: 7.191000 +vertex_buffer: 3.854091 +vertex_buffer: 4.970313 +vertex_buffer: 0.210700 +vertex_buffer: 0.049300 +vertex_buffer: 7.347990 +vertex_buffer: 3.853528 +vertex_buffer: 4.945253 +vertex_buffer: 0.221000 +vertex_buffer: 0.052200 +vertex_buffer: 7.348990 +vertex_buffer: 3.879403 +vertex_buffer: 4.939404 +vertex_buffer: 0.220600 +vertex_buffer: 0.053900 +vertex_buffer: 7.191000 +vertex_buffer: 3.781066 +vertex_buffer: 4.970089 +vertex_buffer: 0.212100 +vertex_buffer: 0.044600 +vertex_buffer: 7.347990 +vertex_buffer: 3.780506 +vertex_buffer: 4.945084 +vertex_buffer: 0.222300 +vertex_buffer: 0.047500 +vertex_buffer: 7.168990 +vertex_buffer: 3.780995 +vertex_buffer: 4.966921 +vertex_buffer: 0.210600 +vertex_buffer: 0.044200 +vertex_buffer: 8.105990 +vertex_buffer: 3.673678 +vertex_buffer: -4.093265 +vertex_buffer: 0.193400 +vertex_buffer: 0.660000 +vertex_buffer: 8.088990 +vertex_buffer: 3.680678 +vertex_buffer: -4.093029 +vertex_buffer: 0.192100 +vertex_buffer: 0.659800 +vertex_buffer: 8.120990 +vertex_buffer: 3.479514 +vertex_buffer: -4.987499 +vertex_buffer: 0.189100 +vertex_buffer: 0.720200 +vertex_buffer: 8.104000 +vertex_buffer: 3.485179 +vertex_buffer: -4.988288 
+vertex_buffer: 0.187800 +vertex_buffer: 0.720000 +vertex_buffer: 7.987990 +vertex_buffer: 3.650700 +vertex_buffer: -4.094052 +vertex_buffer: 0.183500 +vertex_buffer: 0.659800 +vertex_buffer: 7.992000 +vertex_buffer: 3.673678 +vertex_buffer: -4.093265 +vertex_buffer: 0.185200 +vertex_buffer: 0.659400 +vertex_buffer: 7.987990 +vertex_buffer: 3.596764 +vertex_buffer: -4.096878 +vertex_buffer: 0.179800 +vertex_buffer: 0.660700 +vertex_buffer: 8.002990 +vertex_buffer: 3.461763 +vertex_buffer: -4.984319 +vertex_buffer: 0.179100 +vertex_buffer: 0.719900 +vertex_buffer: 8.002990 +vertex_buffer: 3.417072 +vertex_buffer: -4.978396 +vertex_buffer: 0.174700 +vertex_buffer: 0.720500 +vertex_buffer: 8.006990 +vertex_buffer: 3.479514 +vertex_buffer: -4.987499 +vertex_buffer: 0.180900 +vertex_buffer: 0.719700 +vertex_buffer: 7.934000 +vertex_buffer: 3.447359 +vertex_buffer: -3.089672 +vertex_buffer: 0.044900 +vertex_buffer: 0.617400 +vertex_buffer: 7.992000 +vertex_buffer: 3.357943 +vertex_buffer: -4.109610 +vertex_buffer: 0.041500 +vertex_buffer: 0.685800 +vertex_buffer: 8.003990 +vertex_buffer: 3.352830 +vertex_buffer: -4.109857 +vertex_buffer: 0.040600 +vertex_buffer: 0.685900 +vertex_buffer: 7.947990 +vertex_buffer: 3.442067 +vertex_buffer: -3.089904 +vertex_buffer: 0.043900 +vertex_buffer: 0.617400 +vertex_buffer: 8.088990 +vertex_buffer: 3.352830 +vertex_buffer: -4.109857 +vertex_buffer: 0.034900 +vertex_buffer: 0.685900 +vertex_buffer: 8.036000 +vertex_buffer: 3.442122 +vertex_buffer: -3.091901 +vertex_buffer: 0.038000 +vertex_buffer: 0.617500 +vertex_buffer: 8.107000 +vertex_buffer: 3.171661 +vertex_buffer: -4.940374 +vertex_buffer: 0.033700 +vertex_buffer: 0.743900 +vertex_buffer: 8.021990 +vertex_buffer: 3.171661 +vertex_buffer: -4.940374 +vertex_buffer: 0.039400 +vertex_buffer: 0.744000 +vertex_buffer: 8.008990 +vertex_buffer: 3.177350 +vertex_buffer: -4.941065 +vertex_buffer: 0.040400 +vertex_buffer: 0.743800 +vertex_buffer: 8.058990 +vertex_buffer: 3.464813 +vertex_buffer: -3.089913 +vertex_buffer: 0.220100 +vertex_buffer: 0.598100 +vertex_buffer: 8.052990 +vertex_buffer: 3.447424 +vertex_buffer: -3.091670 +vertex_buffer: 0.221700 +vertex_buffer: 0.598600 +vertex_buffer: 8.107000 +vertex_buffer: 3.357943 +vertex_buffer: -4.109610 +vertex_buffer: 0.217200 +vertex_buffer: 0.668000 +vertex_buffer: 8.111990 +vertex_buffer: 3.375448 +vertex_buffer: -4.108757 +vertex_buffer: 0.215600 +vertex_buffer: 0.667500 +vertex_buffer: 8.111990 +vertex_buffer: 3.415553 +vertex_buffer: -4.105852 +vertex_buffer: 0.212000 +vertex_buffer: 0.666200 +vertex_buffer: 8.058990 +vertex_buffer: 3.514396 +vertex_buffer: -3.087097 +vertex_buffer: 0.216600 +vertex_buffer: 0.597100 +vertex_buffer: 8.129000 +vertex_buffer: 3.196587 +vertex_buffer: -4.943947 +vertex_buffer: 0.215600 +vertex_buffer: 0.726900 +vertex_buffer: 8.129000 +vertex_buffer: 3.242119 +vertex_buffer: -4.950491 +vertex_buffer: 0.211300 +vertex_buffer: 0.725700 +vertex_buffer: 8.123990 +vertex_buffer: 3.177350 +vertex_buffer: -4.941065 +vertex_buffer: 0.217500 +vertex_buffer: 0.727300 +vertex_buffer: 8.102000 +vertex_buffer: 3.285218 +vertex_buffer: -5.896081 +vertex_buffer: 0.183700 +vertex_buffer: 0.782700 +vertex_buffer: 8.084990 +vertex_buffer: 3.292145 +vertex_buffer: -5.899772 +vertex_buffer: 0.182700 +vertex_buffer: 0.782700 +vertex_buffer: 8.054990 +vertex_buffer: 3.078037 +vertex_buffer: -6.686757 +vertex_buffer: 0.178700 +vertex_buffer: 0.838700 +vertex_buffer: 8.037990 +vertex_buffer: 3.086942 +vertex_buffer: -6.694157 +vertex_buffer: 0.178000 
+vertex_buffer: 0.838900 +vertex_buffer: 7.984000 +vertex_buffer: 3.265013 +vertex_buffer: -5.884281 +vertex_buffer: 0.174900 +vertex_buffer: 0.782200 +vertex_buffer: 7.986990 +vertex_buffer: 3.285218 +vertex_buffer: -5.896081 +vertex_buffer: 0.176600 +vertex_buffer: 0.782500 +vertex_buffer: 7.984990 +vertex_buffer: 3.209211 +vertex_buffer: -5.851367 +vertex_buffer: 0.169400 +vertex_buffer: 0.781500 +vertex_buffer: 7.940990 +vertex_buffer: 2.980811 +vertex_buffer: -6.599105 +vertex_buffer: 0.163500 +vertex_buffer: 0.836100 +vertex_buffer: 7.937990 +vertex_buffer: 3.050556 +vertex_buffer: -6.662535 +vertex_buffer: 0.170600 +vertex_buffer: 0.838100 +vertex_buffer: 7.940990 +vertex_buffer: 3.078037 +vertex_buffer: -6.686757 +vertex_buffer: 0.172700 +vertex_buffer: 0.838800 +vertex_buffer: 7.995990 +vertex_buffer: 2.901561 +vertex_buffer: -5.669784 +vertex_buffer: 0.040100 +vertex_buffer: 0.799500 +vertex_buffer: 8.008990 +vertex_buffer: 2.894458 +vertex_buffer: -5.665260 +vertex_buffer: 0.039000 +vertex_buffer: 0.799600 +vertex_buffer: 8.093990 +vertex_buffer: 2.894458 +vertex_buffer: -5.665260 +vertex_buffer: 0.033300 +vertex_buffer: 0.799400 +vertex_buffer: 8.059990 +vertex_buffer: 2.609329 +vertex_buffer: -6.232546 +vertex_buffer: 0.033500 +vertex_buffer: 0.847100 +vertex_buffer: 7.974990 +vertex_buffer: 2.609329 +vertex_buffer: -6.232546 +vertex_buffer: 0.039200 +vertex_buffer: 0.847400 +vertex_buffer: 7.962000 +vertex_buffer: 2.617312 +vertex_buffer: -6.241937 +vertex_buffer: 0.040500 +vertex_buffer: 0.847400 +vertex_buffer: 8.111000 +vertex_buffer: 2.901561 +vertex_buffer: -5.669784 +vertex_buffer: 0.222900 +vertex_buffer: 0.784000 +vertex_buffer: 8.116000 +vertex_buffer: 2.923470 +vertex_buffer: -5.681317 +vertex_buffer: 0.220600 +vertex_buffer: 0.783800 +vertex_buffer: 8.114990 +vertex_buffer: 2.981445 +vertex_buffer: -5.714432 +vertex_buffer: 0.214600 +vertex_buffer: 0.783700 +vertex_buffer: 8.080990 +vertex_buffer: 2.639407 +vertex_buffer: -6.273185 +vertex_buffer: 0.227500 +vertex_buffer: 0.833300 +vertex_buffer: 8.077990 +vertex_buffer: 2.705994 +vertex_buffer: -6.343040 +vertex_buffer: 0.219700 +vertex_buffer: 0.834800 +vertex_buffer: 8.077000 +vertex_buffer: 2.617312 +vertex_buffer: -6.241937 +vertex_buffer: 0.230400 +vertex_buffer: 0.832400 +vertex_buffer: 7.984000 +vertex_buffer: 2.886415 +vertex_buffer: -7.224260 +vertex_buffer: 0.176300 +vertex_buffer: 0.879600 +vertex_buffer: 7.965990 +vertex_buffer: 2.899952 +vertex_buffer: -7.236242 +vertex_buffer: 0.174900 +vertex_buffer: 0.879800 +vertex_buffer: 7.869990 +vertex_buffer: 2.886415 +vertex_buffer: -7.224260 +vertex_buffer: 0.168700 +vertex_buffer: 0.879800 +vertex_buffer: 7.868000 +vertex_buffer: 2.845190 +vertex_buffer: -7.190347 +vertex_buffer: 0.164500 +vertex_buffer: 0.879000 +vertex_buffer: 7.873990 +vertex_buffer: 2.760812 +vertex_buffer: -7.105696 +vertex_buffer: 0.154900 +vertex_buffer: 0.876400 +vertex_buffer: 7.819000 +vertex_buffer: 2.598799 +vertex_buffer: -7.395112 +vertex_buffer: 0.146400 +vertex_buffer: 0.901300 +vertex_buffer: 7.810990 +vertex_buffer: 2.694416 +vertex_buffer: -7.491223 +vertex_buffer: 0.158600 +vertex_buffer: 0.904300 +vertex_buffer: 7.828000 +vertex_buffer: 2.768480 +vertex_buffer: -7.441971 +vertex_buffer: 0.165100 +vertex_buffer: 0.898700 +vertex_buffer: 7.922990 +vertex_buffer: 2.392508 +vertex_buffer: -6.583891 +vertex_buffer: 0.040800 +vertex_buffer: 0.880000 +vertex_buffer: 7.909990 +vertex_buffer: 2.398531 +vertex_buffer: -6.603409 +vertex_buffer: 0.042400 +vertex_buffer: 0.880600 
+vertex_buffer: 8.008000
+vertex_buffer: 2.392508
+vertex_buffer: -6.583891
+vertex_buffer: 0.035100
+vertex_buffer: 0.879700
+# … (remaining vertex_buffer entries elided — the asset continues as a long
+# flat list of floats, one value per line; the data apparently encodes one
+# vertex per five consecutive values: x, y, z position followed by u, v
+# texture coordinates)
0.232800 +vertex_buffer: 0.436900 +vertex_buffer: -7.453000 +vertex_buffer: 3.387348 +vertex_buffer: 2.375706 +vertex_buffer: 0.172800 +vertex_buffer: 0.227400 +vertex_buffer: -7.453000 +vertex_buffer: 3.456503 +vertex_buffer: 2.377381 +vertex_buffer: 0.177800 +vertex_buffer: 0.227400 +vertex_buffer: -7.166000 +vertex_buffer: 3.451351 +vertex_buffer: 4.903571 +vertex_buffer: 0.182600 +vertex_buffer: 0.054900 +vertex_buffer: -7.166000 +vertex_buffer: 3.378595 +vertex_buffer: 4.903802 +vertex_buffer: 0.177700 +vertex_buffer: 0.055000 +vertex_buffer: -7.460000 +vertex_buffer: 3.362018 +vertex_buffer: 2.373692 +vertex_buffer: 0.170900 +vertex_buffer: 0.227600 +vertex_buffer: -7.172990 +vertex_buffer: 3.351269 +vertex_buffer: 4.902964 +vertex_buffer: 0.175700 +vertex_buffer: 0.055100 +vertex_buffer: -7.877000 +vertex_buffer: 3.461314 +vertex_buffer: -0.206221 +vertex_buffer: 0.235800 +vertex_buffer: 0.404400 +vertex_buffer: -7.877000 +vertex_buffer: 3.398390 +vertex_buffer: -0.206755 +vertex_buffer: 0.240800 +vertex_buffer: 0.404900 +vertex_buffer: -7.872000 +vertex_buffer: 3.374127 +vertex_buffer: -0.206767 +vertex_buffer: 0.242700 +vertex_buffer: 0.405000 +vertex_buffer: -7.806990 +vertex_buffer: 3.370908 +vertex_buffer: 0.517625 +vertex_buffer: 0.245400 +vertex_buffer: 0.356300 +vertex_buffer: -7.813000 +vertex_buffer: 3.394889 +vertex_buffer: 0.517606 +vertex_buffer: 0.243500 +vertex_buffer: 0.356200 +vertex_buffer: -7.813000 +vertex_buffer: 3.459661 +vertex_buffer: 0.519144 +vertex_buffer: 0.238500 +vertex_buffer: 0.355700 +vertex_buffer: -7.169000 +vertex_buffer: 3.381838 +vertex_buffer: 4.967018 +vertex_buffer: 0.177700 +vertex_buffer: 0.050600 +vertex_buffer: -7.162000 +vertex_buffer: 3.381451 +vertex_buffer: 4.948179 +vertex_buffer: 0.177800 +vertex_buffer: 0.052000 +vertex_buffer: -7.162000 +vertex_buffer: 3.454346 +vertex_buffer: 4.947963 +vertex_buffer: 0.182700 +vertex_buffer: 0.051900 +vertex_buffer: -7.169000 +vertex_buffer: 3.454769 +vertex_buffer: 4.966805 +vertex_buffer: 0.182700 +vertex_buffer: 0.050500 +vertex_buffer: -7.175000 +vertex_buffer: 3.362181 +vertex_buffer: 4.961208 +vertex_buffer: 0.176300 +vertex_buffer: 0.051000 +vertex_buffer: -7.170000 +vertex_buffer: 3.354093 +vertex_buffer: 4.947351 +vertex_buffer: 0.175900 +vertex_buffer: 0.052100 +vertex_buffer: -7.857000 +vertex_buffer: 2.221733 +vertex_buffer: -6.955181 +vertex_buffer: 0.101100 +vertex_buffer: 0.885600 +vertex_buffer: -7.847000 +vertex_buffer: 2.274002 +vertex_buffer: -7.107549 +vertex_buffer: 0.109100 +vertex_buffer: 0.893600 +vertex_buffer: -7.857000 +vertex_buffer: 2.221733 +vertex_buffer: -6.955181 +vertex_buffer: 0.043100 +vertex_buffer: 0.911500 +vertex_buffer: -7.860000 +vertex_buffer: 2.270065 +vertex_buffer: -6.882959 +vertex_buffer: 0.045600 +vertex_buffer: 0.904800 +vertex_buffer: -7.857000 +vertex_buffer: 2.221733 +vertex_buffer: -6.955181 +vertex_buffer: 0.167100 +vertex_buffer: 0.977700 +vertex_buffer: -7.941000 +vertex_buffer: 2.221733 +vertex_buffer: -6.955181 +vertex_buffer: 0.172700 +vertex_buffer: 0.978000 +vertex_buffer: -7.932000 +vertex_buffer: 2.274002 +vertex_buffer: -7.107549 +vertex_buffer: 0.172700 +vertex_buffer: 0.966700 +vertex_buffer: -7.847000 +vertex_buffer: 2.274002 +vertex_buffer: -7.107549 +vertex_buffer: 0.167000 +vertex_buffer: 0.966400 +vertex_buffer: -7.380000 +vertex_buffer: 3.454030 +vertex_buffer: 4.933805 +vertex_buffer: 0.229800 +vertex_buffer: 0.029200 +vertex_buffer: -7.380000 +vertex_buffer: 3.381164 +vertex_buffer: 4.934087 +vertex_buffer: 0.231100 
+vertex_buffer: 0.024400 +vertex_buffer: -7.372000 +vertex_buffer: 3.361561 +vertex_buffer: 4.930083 +vertex_buffer: 0.231000 +vertex_buffer: 0.022900 +vertex_buffer: -7.453000 +vertex_buffer: 3.784569 +vertex_buffer: 2.384960 +vertex_buffer: 0.197600 +vertex_buffer: 0.227500 +vertex_buffer: -7.877000 +vertex_buffer: 3.792463 +vertex_buffer: -0.200980 +vertex_buffer: 0.215900 +vertex_buffer: 0.402600 +vertex_buffer: -7.813000 +vertex_buffer: 3.790806 +vertex_buffer: 0.524386 +vertex_buffer: 0.218500 +vertex_buffer: 0.354200 +vertex_buffer: -8.112000 +vertex_buffer: 3.650700 +vertex_buffer: -4.094052 +vertex_buffer: 0.195100 +vertex_buffer: 0.660600 +vertex_buffer: -8.127000 +vertex_buffer: 3.461763 +vertex_buffer: -4.984319 +vertex_buffer: 0.190900 +vertex_buffer: 0.720600 +vertex_buffer: -8.108000 +vertex_buffer: 3.265013 +vertex_buffer: -5.884281 +vertex_buffer: 0.185500 +vertex_buffer: 0.782600 +vertex_buffer: -8.062000 +vertex_buffer: 3.050547 +vertex_buffer: -6.662533 +vertex_buffer: 0.181000 +vertex_buffer: 0.838300 +vertex_buffer: -7.992000 +vertex_buffer: 2.845190 +vertex_buffer: -7.190347 +vertex_buffer: 0.180500 +vertex_buffer: 0.879500 +vertex_buffer: -7.453000 +vertex_buffer: 3.857556 +vertex_buffer: 2.386535 +vertex_buffer: 0.202500 +vertex_buffer: 0.227400 +vertex_buffer: -7.813000 +vertex_buffer: 3.863797 +vertex_buffer: 0.525859 +vertex_buffer: 0.213600 +vertex_buffer: 0.353600 +vertex_buffer: -7.660990 +vertex_buffer: 3.899231 +vertex_buffer: 0.543075 +vertex_buffer: 0.202100 +vertex_buffer: 0.350900 +vertex_buffer: -7.645000 +vertex_buffer: 3.370319 +vertex_buffer: 0.535608 +vertex_buffer: 0.166900 +vertex_buffer: 0.350800 +vertex_buffer: -7.638000 +vertex_buffer: 3.394282 +vertex_buffer: 0.536591 +vertex_buffer: 0.168800 +vertex_buffer: 0.350700 +vertex_buffer: -7.638000 +vertex_buffer: 3.459015 +vertex_buffer: 0.538135 +vertex_buffer: 0.173800 +vertex_buffer: 0.350600 +vertex_buffer: -7.650000 +vertex_buffer: 3.388224 +vertex_buffer: 2.349172 +vertex_buffer: 0.248100 +vertex_buffer: 0.233500 +vertex_buffer: -7.642000 +vertex_buffer: 3.362851 +vertex_buffer: 2.349142 +vertex_buffer: 0.250000 +vertex_buffer: 0.233600 +vertex_buffer: -7.650000 +vertex_buffer: 3.457405 +vertex_buffer: 2.350813 +vertex_buffer: 0.243100 +vertex_buffer: 0.233000 +vertex_buffer: -7.877000 +vertex_buffer: 3.866491 +vertex_buffer: -0.200468 +vertex_buffer: 0.211100 +vertex_buffer: 0.402000 +vertex_buffer: -7.732000 +vertex_buffer: 3.902024 +vertex_buffer: -0.186245 +vertex_buffer: 0.200200 +vertex_buffer: 0.399600 +vertex_buffer: -7.710000 +vertex_buffer: 3.397887 +vertex_buffer: -0.190762 +vertex_buffer: 0.167200 +vertex_buffer: 0.399500 +vertex_buffer: -7.716990 +vertex_buffer: 3.373675 +vertex_buffer: -0.192771 +vertex_buffer: 0.165200 +vertex_buffer: 0.399700 +vertex_buffer: -7.710000 +vertex_buffer: 3.460771 +vertex_buffer: -0.190223 +vertex_buffer: 0.172200 +vertex_buffer: 0.399500 +vertex_buffer: -7.949990 +vertex_buffer: 3.442044 +vertex_buffer: -1.802035 +vertex_buffer: 0.042300 +vertex_buffer: 0.531400 +vertex_buffer: -7.967000 +vertex_buffer: 3.449207 +vertex_buffer: -1.802729 +vertex_buffer: 0.041100 +vertex_buffer: 0.531500 +vertex_buffer: -7.850990 +vertex_buffer: 3.441902 +vertex_buffer: -1.797037 +vertex_buffer: 0.048900 +vertex_buffer: 0.531200 +vertex_buffer: -7.879990 +vertex_buffer: 3.379658 +vertex_buffer: -0.689881 +vertex_buffer: 0.049200 +vertex_buffer: 0.457400 +vertex_buffer: -7.770990 +vertex_buffer: 3.379373 +vertex_buffer: -0.680886 +vertex_buffer: 0.056500 
+vertex_buffer: 0.457100 +vertex_buffer: -7.897000 +vertex_buffer: 3.387124 +vertex_buffer: -0.691567 +vertex_buffer: 0.047900 +vertex_buffer: 0.457400 +vertex_buffer: -7.740000 +vertex_buffer: 3.366214 +vertex_buffer: -0.195094 +vertex_buffer: 0.060500 +vertex_buffer: 0.425000 +vertex_buffer: -7.749000 +vertex_buffer: 3.386746 +vertex_buffer: -0.679570 +vertex_buffer: 0.058100 +vertex_buffer: 0.457100 +vertex_buffer: -7.716990 +vertex_buffer: 3.373675 +vertex_buffer: -0.192771 +vertex_buffer: 0.062200 +vertex_buffer: 0.425000 +vertex_buffer: -7.645000 +vertex_buffer: 3.370319 +vertex_buffer: 0.535608 +vertex_buffer: 0.071000 +vertex_buffer: 0.377100 +vertex_buffer: -7.667990 +vertex_buffer: 3.361828 +vertex_buffer: 0.533257 +vertex_buffer: 0.069300 +vertex_buffer: 0.377100 +vertex_buffer: -7.787990 +vertex_buffer: 3.362257 +vertex_buffer: 0.520264 +vertex_buffer: 0.061300 +vertex_buffer: 0.377200 +vertex_buffer: -7.853990 +vertex_buffer: 3.366539 +vertex_buffer: -0.205084 +vertex_buffer: 0.052900 +vertex_buffer: 0.425100 +vertex_buffer: -7.617990 +vertex_buffer: 3.353667 +vertex_buffer: 2.352724 +vertex_buffer: 0.086400 +vertex_buffer: 0.257100 +vertex_buffer: -7.484000 +vertex_buffer: 3.353064 +vertex_buffer: 2.370386 +vertex_buffer: 0.095400 +vertex_buffer: 0.256900 +vertex_buffer: -7.806990 +vertex_buffer: 3.370908 +vertex_buffer: 0.517625 +vertex_buffer: 0.059900 +vertex_buffer: 0.377200 +vertex_buffer: -7.872000 +vertex_buffer: 3.374127 +vertex_buffer: -0.206767 +vertex_buffer: 0.051600 +vertex_buffer: 0.425100 +vertex_buffer: -7.834000 +vertex_buffer: 3.449010 +vertex_buffer: -1.795735 +vertex_buffer: 0.050200 +vertex_buffer: 0.531200 +vertex_buffer: -7.352990 +vertex_buffer: 3.341964 +vertex_buffer: 4.875596 +vertex_buffer: 0.127900 +vertex_buffer: 0.090400 +vertex_buffer: -7.197000 +vertex_buffer: 3.342422 +vertex_buffer: 4.899300 +vertex_buffer: 0.138500 +vertex_buffer: 0.090000 +vertex_buffer: -7.350000 +vertex_buffer: 3.344753 +vertex_buffer: 4.918966 +vertex_buffer: 0.128500 +vertex_buffer: 0.087500 +vertex_buffer: -7.192990 +vertex_buffer: 3.345233 +vertex_buffer: 4.943666 +vertex_buffer: 0.139100 +vertex_buffer: 0.087100 +vertex_buffer: -7.192000 +vertex_buffer: 3.355385 +vertex_buffer: 4.963381 +vertex_buffer: 0.139400 +vertex_buffer: 0.085700 +vertex_buffer: -7.349000 +vertex_buffer: 3.354916 +vertex_buffer: 4.939652 +vertex_buffer: 0.128700 +vertex_buffer: 0.086000 +vertex_buffer: -7.372000 +vertex_buffer: 3.361561 +vertex_buffer: 4.930083 +vertex_buffer: 0.127000 +vertex_buffer: 0.086300 +vertex_buffer: -7.380990 +vertex_buffer: 3.353446 +vertex_buffer: 4.914453 +vertex_buffer: 0.126400 +vertex_buffer: 0.087500 +vertex_buffer: -7.384000 +vertex_buffer: 3.350628 +vertex_buffer: 4.870098 +vertex_buffer: 0.125700 +vertex_buffer: 0.090500 +vertex_buffer: -7.642000 +vertex_buffer: 3.362851 +vertex_buffer: 2.349142 +vertex_buffer: 0.084600 +vertex_buffer: 0.257100 +vertex_buffer: -7.460000 +vertex_buffer: 3.362018 +vertex_buffer: 2.373692 +vertex_buffer: 0.097100 +vertex_buffer: 0.256900 +vertex_buffer: -7.172990 +vertex_buffer: 3.351269 +vertex_buffer: 4.902964 +vertex_buffer: 0.140200 +vertex_buffer: 0.090000 +vertex_buffer: -7.170000 +vertex_buffer: 3.354093 +vertex_buffer: 4.947351 +vertex_buffer: 0.140700 +vertex_buffer: 0.087000 +vertex_buffer: -7.175000 +vertex_buffer: 3.362181 +vertex_buffer: 4.961208 +vertex_buffer: 0.140600 +vertex_buffer: 0.085800 +index_buffer: 0 +index_buffer: 1 +index_buffer: 2 +index_buffer: 0 +index_buffer: 2 +index_buffer: 3 +index_buffer: 0 
+index_buffer: 3 +index_buffer: 4 +index_buffer: 5 +index_buffer: 6 +index_buffer: 7 +index_buffer: 5 +index_buffer: 7 +index_buffer: 8 +index_buffer: 5 +index_buffer: 8 +index_buffer: 9 +index_buffer: 10 +index_buffer: 11 +index_buffer: 12 +index_buffer: 10 +index_buffer: 12 +index_buffer: 13 +index_buffer: 14 +index_buffer: 15 +index_buffer: 12 +index_buffer: 14 +index_buffer: 12 +index_buffer: 11 +index_buffer: 16 +index_buffer: 17 +index_buffer: 11 +index_buffer: 16 +index_buffer: 11 +index_buffer: 10 +index_buffer: 17 +index_buffer: 16 +index_buffer: 18 +index_buffer: 17 +index_buffer: 18 +index_buffer: 19 +index_buffer: 19 +index_buffer: 20 +index_buffer: 21 +index_buffer: 19 +index_buffer: 21 +index_buffer: 17 +index_buffer: 22 +index_buffer: 23 +index_buffer: 19 +index_buffer: 22 +index_buffer: 19 +index_buffer: 18 +index_buffer: 22 +index_buffer: 24 +index_buffer: 25 +index_buffer: 22 +index_buffer: 25 +index_buffer: 23 +index_buffer: 25 +index_buffer: 26 +index_buffer: 27 +index_buffer: 25 +index_buffer: 27 +index_buffer: 23 +index_buffer: 28 +index_buffer: 25 +index_buffer: 24 +index_buffer: 28 +index_buffer: 24 +index_buffer: 29 +index_buffer: 30 +index_buffer: 31 +index_buffer: 28 +index_buffer: 30 +index_buffer: 28 +index_buffer: 29 +index_buffer: 32 +index_buffer: 33 +index_buffer: 28 +index_buffer: 32 +index_buffer: 28 +index_buffer: 31 +index_buffer: 34 +index_buffer: 31 +index_buffer: 30 +index_buffer: 34 +index_buffer: 30 +index_buffer: 35 +index_buffer: 36 +index_buffer: 37 +index_buffer: 38 +index_buffer: 36 +index_buffer: 38 +index_buffer: 39 +index_buffer: 39 +index_buffer: 38 +index_buffer: 40 +index_buffer: 39 +index_buffer: 40 +index_buffer: 41 +index_buffer: 42 +index_buffer: 43 +index_buffer: 1 +index_buffer: 42 +index_buffer: 1 +index_buffer: 0 +index_buffer: 43 +index_buffer: 44 +index_buffer: 45 +index_buffer: 43 +index_buffer: 45 +index_buffer: 1 +index_buffer: 45 +index_buffer: 44 +index_buffer: 46 +index_buffer: 45 +index_buffer: 46 +index_buffer: 47 +index_buffer: 48 +index_buffer: 47 +index_buffer: 46 +index_buffer: 48 +index_buffer: 46 +index_buffer: 49 +index_buffer: 50 +index_buffer: 51 +index_buffer: 52 +index_buffer: 50 +index_buffer: 52 +index_buffer: 53 +index_buffer: 51 +index_buffer: 54 +index_buffer: 55 +index_buffer: 51 +index_buffer: 55 +index_buffer: 52 +index_buffer: 54 +index_buffer: 56 +index_buffer: 57 +index_buffer: 54 +index_buffer: 57 +index_buffer: 55 +index_buffer: 57 +index_buffer: 56 +index_buffer: 58 +index_buffer: 57 +index_buffer: 58 +index_buffer: 59 +index_buffer: 59 +index_buffer: 58 +index_buffer: 60 +index_buffer: 59 +index_buffer: 60 +index_buffer: 61 +index_buffer: 61 +index_buffer: 60 +index_buffer: 62 +index_buffer: 61 +index_buffer: 62 +index_buffer: 63 +index_buffer: 63 +index_buffer: 62 +index_buffer: 64 +index_buffer: 63 +index_buffer: 64 +index_buffer: 65 +index_buffer: 65 +index_buffer: 64 +index_buffer: 66 +index_buffer: 65 +index_buffer: 66 +index_buffer: 67 +index_buffer: 34 +index_buffer: 35 +index_buffer: 68 +index_buffer: 34 +index_buffer: 68 +index_buffer: 69 +index_buffer: 70 +index_buffer: 71 +index_buffer: 34 +index_buffer: 70 +index_buffer: 34 +index_buffer: 69 +index_buffer: 69 +index_buffer: 68 +index_buffer: 72 +index_buffer: 69 +index_buffer: 72 +index_buffer: 73 +index_buffer: 74 +index_buffer: 75 +index_buffer: 76 +index_buffer: 74 +index_buffer: 76 +index_buffer: 77 +index_buffer: 78 +index_buffer: 79 +index_buffer: 80 +index_buffer: 78 +index_buffer: 80 +index_buffer: 81 +index_buffer: 82 
+index_buffer: 83 +index_buffer: 84 +index_buffer: 82 +index_buffer: 84 +index_buffer: 85 +index_buffer: 85 +index_buffer: 84 +index_buffer: 86 +index_buffer: 85 +index_buffer: 86 +index_buffer: 87 +index_buffer: 84 +index_buffer: 88 +index_buffer: 89 +index_buffer: 84 +index_buffer: 89 +index_buffer: 86 +index_buffer: 83 +index_buffer: 90 +index_buffer: 88 +index_buffer: 83 +index_buffer: 88 +index_buffer: 84 +index_buffer: 91 +index_buffer: 92 +index_buffer: 93 +index_buffer: 91 +index_buffer: 93 +index_buffer: 94 +index_buffer: 92 +index_buffer: 95 +index_buffer: 96 +index_buffer: 92 +index_buffer: 96 +index_buffer: 93 +index_buffer: 92 +index_buffer: 97 +index_buffer: 98 +index_buffer: 92 +index_buffer: 98 +index_buffer: 95 +index_buffer: 99 +index_buffer: 97 +index_buffer: 92 +index_buffer: 99 +index_buffer: 92 +index_buffer: 91 +index_buffer: 100 +index_buffer: 101 +index_buffer: 102 +index_buffer: 100 +index_buffer: 102 +index_buffer: 103 +index_buffer: 100 +index_buffer: 104 +index_buffer: 105 +index_buffer: 100 +index_buffer: 105 +index_buffer: 101 +index_buffer: 106 +index_buffer: 104 +index_buffer: 100 +index_buffer: 106 +index_buffer: 100 +index_buffer: 107 +index_buffer: 108 +index_buffer: 107 +index_buffer: 100 +index_buffer: 108 +index_buffer: 100 +index_buffer: 103 +index_buffer: 109 +index_buffer: 107 +index_buffer: 108 +index_buffer: 109 +index_buffer: 108 +index_buffer: 110 +index_buffer: 109 +index_buffer: 111 +index_buffer: 106 +index_buffer: 109 +index_buffer: 106 +index_buffer: 107 +index_buffer: 112 +index_buffer: 111 +index_buffer: 109 +index_buffer: 112 +index_buffer: 109 +index_buffer: 113 +index_buffer: 113 +index_buffer: 109 +index_buffer: 110 +index_buffer: 113 +index_buffer: 110 +index_buffer: 114 +index_buffer: 115 +index_buffer: 113 +index_buffer: 114 +index_buffer: 115 +index_buffer: 114 +index_buffer: 116 +index_buffer: 117 +index_buffer: 112 +index_buffer: 113 +index_buffer: 117 +index_buffer: 113 +index_buffer: 115 +index_buffer: 118 +index_buffer: 117 +index_buffer: 115 +index_buffer: 118 +index_buffer: 115 +index_buffer: 119 +index_buffer: 119 +index_buffer: 115 +index_buffer: 116 +index_buffer: 119 +index_buffer: 116 +index_buffer: 120 +index_buffer: 121 +index_buffer: 122 +index_buffer: 119 +index_buffer: 121 +index_buffer: 119 +index_buffer: 120 +index_buffer: 123 +index_buffer: 118 +index_buffer: 119 +index_buffer: 123 +index_buffer: 119 +index_buffer: 122 +index_buffer: 124 +index_buffer: 123 +index_buffer: 122 +index_buffer: 124 +index_buffer: 122 +index_buffer: 125 +index_buffer: 125 +index_buffer: 122 +index_buffer: 121 +index_buffer: 125 +index_buffer: 121 +index_buffer: 126 +index_buffer: 127 +index_buffer: 128 +index_buffer: 129 +index_buffer: 127 +index_buffer: 129 +index_buffer: 130 +index_buffer: 131 +index_buffer: 36 +index_buffer: 130 +index_buffer: 131 +index_buffer: 130 +index_buffer: 129 +index_buffer: 132 +index_buffer: 133 +index_buffer: 131 +index_buffer: 132 +index_buffer: 131 +index_buffer: 129 +index_buffer: 132 +index_buffer: 129 +index_buffer: 128 +index_buffer: 132 +index_buffer: 128 +index_buffer: 134 +index_buffer: 135 +index_buffer: 136 +index_buffer: 132 +index_buffer: 135 +index_buffer: 132 +index_buffer: 134 +index_buffer: 136 +index_buffer: 137 +index_buffer: 133 +index_buffer: 136 +index_buffer: 133 +index_buffer: 132 +index_buffer: 137 +index_buffer: 136 +index_buffer: 138 +index_buffer: 137 +index_buffer: 138 +index_buffer: 139 +index_buffer: 140 +index_buffer: 138 +index_buffer: 136 +index_buffer: 140 
+index_buffer: 136 +index_buffer: 135 +index_buffer: 141 +index_buffer: 142 +index_buffer: 138 +index_buffer: 141 +index_buffer: 138 +index_buffer: 140 +index_buffer: 139 +index_buffer: 138 +index_buffer: 142 +index_buffer: 139 +index_buffer: 142 +index_buffer: 143 +index_buffer: 144 +index_buffer: 143 +index_buffer: 142 +index_buffer: 144 +index_buffer: 142 +index_buffer: 145 +index_buffer: 141 +index_buffer: 146 +index_buffer: 145 +index_buffer: 141 +index_buffer: 145 +index_buffer: 142 +index_buffer: 147 +index_buffer: 145 +index_buffer: 146 +index_buffer: 147 +index_buffer: 146 +index_buffer: 148 +index_buffer: 147 +index_buffer: 149 +index_buffer: 144 +index_buffer: 147 +index_buffer: 144 +index_buffer: 145 +index_buffer: 150 +index_buffer: 149 +index_buffer: 147 +index_buffer: 150 +index_buffer: 147 +index_buffer: 151 +index_buffer: 151 +index_buffer: 147 +index_buffer: 148 +index_buffer: 151 +index_buffer: 148 +index_buffer: 152 +index_buffer: 153 +index_buffer: 154 +index_buffer: 155 +index_buffer: 153 +index_buffer: 155 +index_buffer: 15 +index_buffer: 15 +index_buffer: 155 +index_buffer: 156 +index_buffer: 15 +index_buffer: 156 +index_buffer: 12 +index_buffer: 154 +index_buffer: 157 +index_buffer: 158 +index_buffer: 154 +index_buffer: 158 +index_buffer: 155 +index_buffer: 159 +index_buffer: 160 +index_buffer: 161 +index_buffer: 159 +index_buffer: 161 +index_buffer: 162 +index_buffer: 163 +index_buffer: 164 +index_buffer: 160 +index_buffer: 163 +index_buffer: 160 +index_buffer: 159 +index_buffer: 165 +index_buffer: 159 +index_buffer: 162 +index_buffer: 165 +index_buffer: 162 +index_buffer: 4 +index_buffer: 166 +index_buffer: 151 +index_buffer: 152 +index_buffer: 166 +index_buffer: 152 +index_buffer: 167 +index_buffer: 168 +index_buffer: 150 +index_buffer: 151 +index_buffer: 168 +index_buffer: 151 +index_buffer: 166 +index_buffer: 168 +index_buffer: 166 +index_buffer: 169 +index_buffer: 168 +index_buffer: 169 +index_buffer: 170 +index_buffer: 166 +index_buffer: 167 +index_buffer: 171 +index_buffer: 166 +index_buffer: 171 +index_buffer: 169 +index_buffer: 172 +index_buffer: 173 +index_buffer: 174 +index_buffer: 172 +index_buffer: 174 +index_buffer: 175 +index_buffer: 176 +index_buffer: 177 +index_buffer: 174 +index_buffer: 176 +index_buffer: 174 +index_buffer: 173 +index_buffer: 178 +index_buffer: 179 +index_buffer: 180 +index_buffer: 178 +index_buffer: 180 +index_buffer: 181 +index_buffer: 181 +index_buffer: 182 +index_buffer: 183 +index_buffer: 181 +index_buffer: 183 +index_buffer: 178 +index_buffer: 181 +index_buffer: 10 +index_buffer: 13 +index_buffer: 181 +index_buffer: 13 +index_buffer: 182 +index_buffer: 16 +index_buffer: 10 +index_buffer: 181 +index_buffer: 16 +index_buffer: 181 +index_buffer: 180 +index_buffer: 183 +index_buffer: 182 +index_buffer: 184 +index_buffer: 183 +index_buffer: 184 +index_buffer: 185 +index_buffer: 186 +index_buffer: 185 +index_buffer: 184 +index_buffer: 186 +index_buffer: 184 +index_buffer: 187 +index_buffer: 188 +index_buffer: 189 +index_buffer: 187 +index_buffer: 188 +index_buffer: 187 +index_buffer: 184 +index_buffer: 13 +index_buffer: 188 +index_buffer: 184 +index_buffer: 13 +index_buffer: 184 +index_buffer: 182 +index_buffer: 190 +index_buffer: 191 +index_buffer: 192 +index_buffer: 190 +index_buffer: 192 +index_buffer: 193 +index_buffer: 193 +index_buffer: 194 +index_buffer: 195 +index_buffer: 193 +index_buffer: 195 +index_buffer: 190 +index_buffer: 196 +index_buffer: 194 +index_buffer: 193 +index_buffer: 196 +index_buffer: 193 +index_buffer: 
197 +index_buffer: 193 +index_buffer: 192 +index_buffer: 198 +index_buffer: 193 +index_buffer: 198 +index_buffer: 197 +index_buffer: 199 +index_buffer: 200 +index_buffer: 201 +index_buffer: 199 +index_buffer: 201 +index_buffer: 202 +index_buffer: 203 +index_buffer: 201 +index_buffer: 200 +index_buffer: 203 +index_buffer: 200 +index_buffer: 204 +index_buffer: 205 +index_buffer: 200 +index_buffer: 199 +index_buffer: 205 +index_buffer: 199 +index_buffer: 206 +index_buffer: 207 +index_buffer: 208 +index_buffer: 209 +index_buffer: 207 +index_buffer: 209 +index_buffer: 210 +index_buffer: 209 +index_buffer: 211 +index_buffer: 212 +index_buffer: 209 +index_buffer: 212 +index_buffer: 210 +index_buffer: 213 +index_buffer: 214 +index_buffer: 215 +index_buffer: 213 +index_buffer: 215 +index_buffer: 216 +index_buffer: 215 +index_buffer: 214 +index_buffer: 217 +index_buffer: 215 +index_buffer: 217 +index_buffer: 218 +index_buffer: 214 +index_buffer: 219 +index_buffer: 220 +index_buffer: 214 +index_buffer: 220 +index_buffer: 217 +index_buffer: 213 +index_buffer: 221 +index_buffer: 219 +index_buffer: 213 +index_buffer: 219 +index_buffer: 214 +index_buffer: 222 +index_buffer: 223 +index_buffer: 224 +index_buffer: 222 +index_buffer: 224 +index_buffer: 225 +index_buffer: 224 +index_buffer: 223 +index_buffer: 226 +index_buffer: 224 +index_buffer: 226 +index_buffer: 227 +index_buffer: 228 +index_buffer: 229 +index_buffer: 230 +index_buffer: 228 +index_buffer: 230 +index_buffer: 231 +index_buffer: 231 +index_buffer: 230 +index_buffer: 232 +index_buffer: 231 +index_buffer: 232 +index_buffer: 233 +index_buffer: 234 +index_buffer: 235 +index_buffer: 236 +index_buffer: 234 +index_buffer: 236 +index_buffer: 237 +index_buffer: 236 +index_buffer: 235 +index_buffer: 229 +index_buffer: 236 +index_buffer: 229 +index_buffer: 228 +index_buffer: 238 +index_buffer: 239 +index_buffer: 240 +index_buffer: 238 +index_buffer: 240 +index_buffer: 241 +index_buffer: 237 +index_buffer: 240 +index_buffer: 239 +index_buffer: 237 +index_buffer: 239 +index_buffer: 234 +index_buffer: 242 +index_buffer: 243 +index_buffer: 244 +index_buffer: 242 +index_buffer: 244 +index_buffer: 245 +index_buffer: 244 +index_buffer: 238 +index_buffer: 241 +index_buffer: 244 +index_buffer: 241 +index_buffer: 245 +index_buffer: 246 +index_buffer: 247 +index_buffer: 248 +index_buffer: 246 +index_buffer: 248 +index_buffer: 249 +index_buffer: 248 +index_buffer: 247 +index_buffer: 243 +index_buffer: 248 +index_buffer: 243 +index_buffer: 242 +index_buffer: 250 +index_buffer: 251 +index_buffer: 252 +index_buffer: 250 +index_buffer: 252 +index_buffer: 253 +index_buffer: 251 +index_buffer: 246 +index_buffer: 249 +index_buffer: 251 +index_buffer: 249 +index_buffer: 252 +index_buffer: 254 +index_buffer: 255 +index_buffer: 256 +index_buffer: 254 +index_buffer: 256 +index_buffer: 257 +index_buffer: 256 +index_buffer: 255 +index_buffer: 250 +index_buffer: 256 +index_buffer: 250 +index_buffer: 253 +index_buffer: 258 +index_buffer: 259 +index_buffer: 260 +index_buffer: 258 +index_buffer: 260 +index_buffer: 261 +index_buffer: 258 +index_buffer: 254 +index_buffer: 257 +index_buffer: 258 +index_buffer: 257 +index_buffer: 259 +index_buffer: 262 +index_buffer: 263 +index_buffer: 264 +index_buffer: 262 +index_buffer: 264 +index_buffer: 265 +index_buffer: 261 +index_buffer: 260 +index_buffer: 263 +index_buffer: 261 +index_buffer: 263 +index_buffer: 262 +index_buffer: 266 +index_buffer: 267 +index_buffer: 268 +index_buffer: 266 +index_buffer: 268 +index_buffer: 269 +index_buffer: 
269 +index_buffer: 268 +index_buffer: 270 +index_buffer: 269 +index_buffer: 270 +index_buffer: 271 +index_buffer: 272 +index_buffer: 273 +index_buffer: 274 +index_buffer: 272 +index_buffer: 274 +index_buffer: 275 +index_buffer: 276 +index_buffer: 277 +index_buffer: 272 +index_buffer: 276 +index_buffer: 272 +index_buffer: 275 +index_buffer: 278 +index_buffer: 279 +index_buffer: 280 +index_buffer: 278 +index_buffer: 280 +index_buffer: 281 +index_buffer: 282 +index_buffer: 279 +index_buffer: 278 +index_buffer: 282 +index_buffer: 278 +index_buffer: 283 +index_buffer: 284 +index_buffer: 285 +index_buffer: 286 +index_buffer: 284 +index_buffer: 286 +index_buffer: 287 +index_buffer: 285 +index_buffer: 288 +index_buffer: 289 +index_buffer: 285 +index_buffer: 289 +index_buffer: 286 +index_buffer: 285 +index_buffer: 30 +index_buffer: 29 +index_buffer: 285 +index_buffer: 29 +index_buffer: 288 +index_buffer: 30 +index_buffer: 285 +index_buffer: 284 +index_buffer: 30 +index_buffer: 284 +index_buffer: 35 +index_buffer: 288 +index_buffer: 290 +index_buffer: 291 +index_buffer: 288 +index_buffer: 291 +index_buffer: 289 +index_buffer: 292 +index_buffer: 293 +index_buffer: 291 +index_buffer: 292 +index_buffer: 291 +index_buffer: 290 +index_buffer: 22 +index_buffer: 292 +index_buffer: 290 +index_buffer: 22 +index_buffer: 290 +index_buffer: 24 +index_buffer: 24 +index_buffer: 290 +index_buffer: 288 +index_buffer: 24 +index_buffer: 288 +index_buffer: 29 +index_buffer: 294 +index_buffer: 293 +index_buffer: 292 +index_buffer: 294 +index_buffer: 292 +index_buffer: 295 +index_buffer: 180 +index_buffer: 179 +index_buffer: 294 +index_buffer: 180 +index_buffer: 294 +index_buffer: 295 +index_buffer: 16 +index_buffer: 180 +index_buffer: 295 +index_buffer: 16 +index_buffer: 295 +index_buffer: 18 +index_buffer: 295 +index_buffer: 292 +index_buffer: 22 +index_buffer: 295 +index_buffer: 22 +index_buffer: 18 +index_buffer: 296 +index_buffer: 297 +index_buffer: 298 +index_buffer: 296 +index_buffer: 298 +index_buffer: 299 +index_buffer: 300 +index_buffer: 297 +index_buffer: 296 +index_buffer: 300 +index_buffer: 296 +index_buffer: 301 +index_buffer: 302 +index_buffer: 303 +index_buffer: 300 +index_buffer: 302 +index_buffer: 300 +index_buffer: 301 +index_buffer: 304 +index_buffer: 305 +index_buffer: 303 +index_buffer: 304 +index_buffer: 303 +index_buffer: 302 +index_buffer: 306 +index_buffer: 307 +index_buffer: 305 +index_buffer: 306 +index_buffer: 305 +index_buffer: 304 +index_buffer: 308 +index_buffer: 307 +index_buffer: 306 +index_buffer: 308 +index_buffer: 306 +index_buffer: 309 +index_buffer: 310 +index_buffer: 308 +index_buffer: 309 +index_buffer: 310 +index_buffer: 309 +index_buffer: 311 +index_buffer: 312 +index_buffer: 310 +index_buffer: 311 +index_buffer: 312 +index_buffer: 311 +index_buffer: 313 +index_buffer: 314 +index_buffer: 312 +index_buffer: 313 +index_buffer: 314 +index_buffer: 313 +index_buffer: 315 +index_buffer: 316 +index_buffer: 314 +index_buffer: 315 +index_buffer: 316 +index_buffer: 315 +index_buffer: 317 +index_buffer: 318 +index_buffer: 319 +index_buffer: 316 +index_buffer: 318 +index_buffer: 316 +index_buffer: 317 +index_buffer: 320 +index_buffer: 321 +index_buffer: 319 +index_buffer: 320 +index_buffer: 319 +index_buffer: 318 +index_buffer: 322 +index_buffer: 323 +index_buffer: 321 +index_buffer: 322 +index_buffer: 321 +index_buffer: 320 +index_buffer: 324 +index_buffer: 325 +index_buffer: 323 +index_buffer: 324 +index_buffer: 323 +index_buffer: 322 +index_buffer: 326 +index_buffer: 327 +index_buffer: 
328 +index_buffer: 326 +index_buffer: 328 +index_buffer: 329 +index_buffer: 330 +index_buffer: 331 +index_buffer: 326 +index_buffer: 330 +index_buffer: 326 +index_buffer: 329 +index_buffer: 332 +index_buffer: 333 +index_buffer: 334 +index_buffer: 332 +index_buffer: 334 +index_buffer: 335 +index_buffer: 336 +index_buffer: 333 +index_buffer: 332 +index_buffer: 336 +index_buffer: 332 +index_buffer: 337 +index_buffer: 338 +index_buffer: 339 +index_buffer: 336 +index_buffer: 338 +index_buffer: 336 +index_buffer: 337 +index_buffer: 340 +index_buffer: 339 +index_buffer: 338 +index_buffer: 340 +index_buffer: 338 +index_buffer: 341 +index_buffer: 342 +index_buffer: 343 +index_buffer: 340 +index_buffer: 342 +index_buffer: 340 +index_buffer: 341 +index_buffer: 344 +index_buffer: 343 +index_buffer: 342 +index_buffer: 344 +index_buffer: 342 +index_buffer: 345 +index_buffer: 346 +index_buffer: 347 +index_buffer: 344 +index_buffer: 346 +index_buffer: 344 +index_buffer: 345 +index_buffer: 348 +index_buffer: 347 +index_buffer: 346 +index_buffer: 348 +index_buffer: 346 +index_buffer: 349 +index_buffer: 350 +index_buffer: 351 +index_buffer: 348 +index_buffer: 350 +index_buffer: 348 +index_buffer: 349 +index_buffer: 352 +index_buffer: 351 +index_buffer: 350 +index_buffer: 352 +index_buffer: 350 +index_buffer: 353 +index_buffer: 354 +index_buffer: 355 +index_buffer: 352 +index_buffer: 354 +index_buffer: 352 +index_buffer: 353 +index_buffer: 356 +index_buffer: 357 +index_buffer: 355 +index_buffer: 356 +index_buffer: 355 +index_buffer: 354 +index_buffer: 358 +index_buffer: 357 +index_buffer: 356 +index_buffer: 358 +index_buffer: 356 +index_buffer: 359 +index_buffer: 360 +index_buffer: 361 +index_buffer: 358 +index_buffer: 360 +index_buffer: 358 +index_buffer: 359 +index_buffer: 362 +index_buffer: 361 +index_buffer: 360 +index_buffer: 362 +index_buffer: 360 +index_buffer: 363 +index_buffer: 364 +index_buffer: 362 +index_buffer: 363 +index_buffer: 364 +index_buffer: 363 +index_buffer: 365 +index_buffer: 366 +index_buffer: 364 +index_buffer: 365 +index_buffer: 366 +index_buffer: 365 +index_buffer: 367 +index_buffer: 368 +index_buffer: 366 +index_buffer: 367 +index_buffer: 368 +index_buffer: 367 +index_buffer: 369 +index_buffer: 370 +index_buffer: 371 +index_buffer: 372 +index_buffer: 370 +index_buffer: 372 +index_buffer: 373 +index_buffer: 374 +index_buffer: 371 +index_buffer: 370 +index_buffer: 374 +index_buffer: 370 +index_buffer: 375 +index_buffer: 376 +index_buffer: 377 +index_buffer: 374 +index_buffer: 376 +index_buffer: 374 +index_buffer: 375 +index_buffer: 378 +index_buffer: 379 +index_buffer: 377 +index_buffer: 378 +index_buffer: 377 +index_buffer: 376 +index_buffer: 130 +index_buffer: 36 +index_buffer: 39 +index_buffer: 130 +index_buffer: 39 +index_buffer: 380 +index_buffer: 130 +index_buffer: 380 +index_buffer: 381 +index_buffer: 130 +index_buffer: 381 +index_buffer: 127 +index_buffer: 382 +index_buffer: 383 +index_buffer: 381 +index_buffer: 382 +index_buffer: 381 +index_buffer: 380 +index_buffer: 380 +index_buffer: 39 +index_buffer: 41 +index_buffer: 380 +index_buffer: 41 +index_buffer: 382 +index_buffer: 384 +index_buffer: 385 +index_buffer: 75 +index_buffer: 384 +index_buffer: 75 +index_buffer: 74 +index_buffer: 386 +index_buffer: 385 +index_buffer: 384 +index_buffer: 386 +index_buffer: 384 +index_buffer: 387 +index_buffer: 131 +index_buffer: 388 +index_buffer: 37 +index_buffer: 131 +index_buffer: 37 +index_buffer: 36 +index_buffer: 133 +index_buffer: 389 +index_buffer: 388 +index_buffer: 133 
+index_buffer: 388 +index_buffer: 131 +index_buffer: 137 +index_buffer: 390 +index_buffer: 389 +index_buffer: 137 +index_buffer: 389 +index_buffer: 133 +index_buffer: 391 +index_buffer: 390 +index_buffer: 137 +index_buffer: 391 +index_buffer: 137 +index_buffer: 139 +index_buffer: 392 +index_buffer: 391 +index_buffer: 139 +index_buffer: 392 +index_buffer: 139 +index_buffer: 143 +index_buffer: 393 +index_buffer: 392 +index_buffer: 143 +index_buffer: 393 +index_buffer: 143 +index_buffer: 144 +index_buffer: 394 +index_buffer: 393 +index_buffer: 144 +index_buffer: 394 +index_buffer: 144 +index_buffer: 149 +index_buffer: 395 +index_buffer: 394 +index_buffer: 149 +index_buffer: 395 +index_buffer: 149 +index_buffer: 150 +index_buffer: 396 +index_buffer: 395 +index_buffer: 150 +index_buffer: 396 +index_buffer: 150 +index_buffer: 168 +index_buffer: 396 +index_buffer: 168 +index_buffer: 170 +index_buffer: 396 +index_buffer: 170 +index_buffer: 397 +index_buffer: 398 +index_buffer: 399 +index_buffer: 207 +index_buffer: 398 +index_buffer: 207 +index_buffer: 210 +index_buffer: 398 +index_buffer: 210 +index_buffer: 212 +index_buffer: 398 +index_buffer: 212 +index_buffer: 400 +index_buffer: 401 +index_buffer: 402 +index_buffer: 403 +index_buffer: 403 +index_buffer: 402 +index_buffer: 404 +index_buffer: 403 +index_buffer: 404 +index_buffer: 405 +index_buffer: 406 +index_buffer: 407 +index_buffer: 408 +index_buffer: 409 +index_buffer: 410 +index_buffer: 411 +index_buffer: 409 +index_buffer: 411 +index_buffer: 412 +index_buffer: 412 +index_buffer: 411 +index_buffer: 413 +index_buffer: 412 +index_buffer: 413 +index_buffer: 414 +index_buffer: 410 +index_buffer: 415 +index_buffer: 416 +index_buffer: 410 +index_buffer: 416 +index_buffer: 411 +index_buffer: 417 +index_buffer: 410 +index_buffer: 409 +index_buffer: 417 +index_buffer: 409 +index_buffer: 418 +index_buffer: 417 +index_buffer: 419 +index_buffer: 415 +index_buffer: 417 +index_buffer: 415 +index_buffer: 410 +index_buffer: 420 +index_buffer: 421 +index_buffer: 422 +index_buffer: 420 +index_buffer: 422 +index_buffer: 423 +index_buffer: 423 +index_buffer: 424 +index_buffer: 425 +index_buffer: 423 +index_buffer: 425 +index_buffer: 420 +index_buffer: 423 +index_buffer: 426 +index_buffer: 427 +index_buffer: 423 +index_buffer: 427 +index_buffer: 424 +index_buffer: 426 +index_buffer: 423 +index_buffer: 422 +index_buffer: 426 +index_buffer: 422 +index_buffer: 428 +index_buffer: 429 +index_buffer: 430 +index_buffer: 431 +index_buffer: 429 +index_buffer: 431 +index_buffer: 432 +index_buffer: 432 +index_buffer: 422 +index_buffer: 421 +index_buffer: 432 +index_buffer: 421 +index_buffer: 429 +index_buffer: 432 +index_buffer: 433 +index_buffer: 428 +index_buffer: 432 +index_buffer: 428 +index_buffer: 422 +index_buffer: 431 +index_buffer: 434 +index_buffer: 433 +index_buffer: 431 +index_buffer: 433 +index_buffer: 432 +index_buffer: 435 +index_buffer: 436 +index_buffer: 437 +index_buffer: 435 +index_buffer: 437 +index_buffer: 438 +index_buffer: 438 +index_buffer: 431 +index_buffer: 430 +index_buffer: 438 +index_buffer: 430 +index_buffer: 435 +index_buffer: 438 +index_buffer: 439 +index_buffer: 434 +index_buffer: 438 +index_buffer: 434 +index_buffer: 431 +index_buffer: 437 +index_buffer: 440 +index_buffer: 439 +index_buffer: 437 +index_buffer: 439 +index_buffer: 438 +index_buffer: 441 +index_buffer: 442 +index_buffer: 443 +index_buffer: 441 +index_buffer: 443 +index_buffer: 444 +index_buffer: 437 +index_buffer: 436 +index_buffer: 442 +index_buffer: 437 +index_buffer: 442 
+index_buffer: 441 +index_buffer: 441 +index_buffer: 445 +index_buffer: 440 +index_buffer: 441 +index_buffer: 440 +index_buffer: 437 +index_buffer: 441 +index_buffer: 444 +index_buffer: 446 +index_buffer: 441 +index_buffer: 446 +index_buffer: 445 +index_buffer: 447 +index_buffer: 126 +index_buffer: 448 +index_buffer: 447 +index_buffer: 448 +index_buffer: 449 +index_buffer: 444 +index_buffer: 443 +index_buffer: 447 +index_buffer: 444 +index_buffer: 447 +index_buffer: 449 +index_buffer: 449 +index_buffer: 450 +index_buffer: 446 +index_buffer: 449 +index_buffer: 446 +index_buffer: 444 +index_buffer: 451 +index_buffer: 450 +index_buffer: 449 +index_buffer: 451 +index_buffer: 449 +index_buffer: 448 +index_buffer: 452 +index_buffer: 121 +index_buffer: 120 +index_buffer: 452 +index_buffer: 120 +index_buffer: 453 +index_buffer: 452 +index_buffer: 448 +index_buffer: 126 +index_buffer: 452 +index_buffer: 126 +index_buffer: 121 +index_buffer: 448 +index_buffer: 452 +index_buffer: 454 +index_buffer: 448 +index_buffer: 454 +index_buffer: 451 +index_buffer: 453 +index_buffer: 455 +index_buffer: 454 +index_buffer: 453 +index_buffer: 454 +index_buffer: 452 +index_buffer: 456 +index_buffer: 457 +index_buffer: 116 +index_buffer: 456 +index_buffer: 116 +index_buffer: 114 +index_buffer: 120 +index_buffer: 116 +index_buffer: 457 +index_buffer: 120 +index_buffer: 457 +index_buffer: 453 +index_buffer: 458 +index_buffer: 455 +index_buffer: 453 +index_buffer: 458 +index_buffer: 453 +index_buffer: 457 +index_buffer: 457 +index_buffer: 456 +index_buffer: 459 +index_buffer: 457 +index_buffer: 459 +index_buffer: 458 +index_buffer: 110 +index_buffer: 108 +index_buffer: 460 +index_buffer: 110 +index_buffer: 460 +index_buffer: 461 +index_buffer: 456 +index_buffer: 114 +index_buffer: 110 +index_buffer: 456 +index_buffer: 110 +index_buffer: 461 +index_buffer: 461 +index_buffer: 462 +index_buffer: 459 +index_buffer: 461 +index_buffer: 459 +index_buffer: 456 +index_buffer: 463 +index_buffer: 462 +index_buffer: 461 +index_buffer: 463 +index_buffer: 461 +index_buffer: 460 +index_buffer: 464 +index_buffer: 103 +index_buffer: 102 +index_buffer: 464 +index_buffer: 102 +index_buffer: 465 +index_buffer: 460 +index_buffer: 108 +index_buffer: 103 +index_buffer: 460 +index_buffer: 103 +index_buffer: 464 +index_buffer: 466 +index_buffer: 463 +index_buffer: 460 +index_buffer: 466 +index_buffer: 460 +index_buffer: 464 +index_buffer: 466 +index_buffer: 464 +index_buffer: 465 +index_buffer: 466 +index_buffer: 465 +index_buffer: 467 +index_buffer: 94 +index_buffer: 468 +index_buffer: 469 +index_buffer: 94 +index_buffer: 469 +index_buffer: 91 +index_buffer: 91 +index_buffer: 469 +index_buffer: 470 +index_buffer: 91 +index_buffer: 470 +index_buffer: 99 +index_buffer: 471 +index_buffer: 472 +index_buffer: 470 +index_buffer: 471 +index_buffer: 470 +index_buffer: 469 +index_buffer: 468 +index_buffer: 473 +index_buffer: 471 +index_buffer: 468 +index_buffer: 471 +index_buffer: 469 +index_buffer: 474 +index_buffer: 475 +index_buffer: 83 +index_buffer: 474 +index_buffer: 83 +index_buffer: 82 +index_buffer: 475 +index_buffer: 476 +index_buffer: 90 +index_buffer: 475 +index_buffer: 90 +index_buffer: 83 +index_buffer: 475 +index_buffer: 477 +index_buffer: 478 +index_buffer: 475 +index_buffer: 478 +index_buffer: 476 +index_buffer: 479 +index_buffer: 477 +index_buffer: 475 +index_buffer: 479 +index_buffer: 475 +index_buffer: 474 +index_buffer: 480 +index_buffer: 481 +index_buffer: 482 +index_buffer: 480 +index_buffer: 482 +index_buffer: 483 
+index_buffer: 481 +index_buffer: 480 +index_buffer: 484 +index_buffer: 481 +index_buffer: 484 +index_buffer: 485 +index_buffer: 486 +index_buffer: 487 +index_buffer: 488 +index_buffer: 486 +index_buffer: 488 +index_buffer: 489 +index_buffer: 490 +index_buffer: 491 +index_buffer: 492 +index_buffer: 490 +index_buffer: 492 +index_buffer: 493 +index_buffer: 494 +index_buffer: 495 +index_buffer: 496 +index_buffer: 494 +index_buffer: 496 +index_buffer: 497 +index_buffer: 498 +index_buffer: 499 +index_buffer: 495 +index_buffer: 498 +index_buffer: 495 +index_buffer: 494 +index_buffer: 500 +index_buffer: 501 +index_buffer: 499 +index_buffer: 500 +index_buffer: 499 +index_buffer: 498 +index_buffer: 502 +index_buffer: 503 +index_buffer: 501 +index_buffer: 502 +index_buffer: 501 +index_buffer: 500 +index_buffer: 504 +index_buffer: 505 +index_buffer: 503 +index_buffer: 504 +index_buffer: 503 +index_buffer: 502 +index_buffer: 506 +index_buffer: 507 +index_buffer: 505 +index_buffer: 506 +index_buffer: 505 +index_buffer: 504 +index_buffer: 508 +index_buffer: 509 +index_buffer: 507 +index_buffer: 508 +index_buffer: 507 +index_buffer: 506 +index_buffer: 510 +index_buffer: 509 +index_buffer: 508 +index_buffer: 510 +index_buffer: 508 +index_buffer: 511 +index_buffer: 512 +index_buffer: 513 +index_buffer: 510 +index_buffer: 512 +index_buffer: 510 +index_buffer: 511 +index_buffer: 514 +index_buffer: 513 +index_buffer: 512 +index_buffer: 514 +index_buffer: 512 +index_buffer: 515 +index_buffer: 516 +index_buffer: 517 +index_buffer: 518 +index_buffer: 516 +index_buffer: 518 +index_buffer: 519 +index_buffer: 520 +index_buffer: 521 +index_buffer: 516 +index_buffer: 520 +index_buffer: 516 +index_buffer: 519 +index_buffer: 522 +index_buffer: 523 +index_buffer: 524 +index_buffer: 522 +index_buffer: 524 +index_buffer: 525 +index_buffer: 525 +index_buffer: 524 +index_buffer: 526 +index_buffer: 525 +index_buffer: 526 +index_buffer: 527 +index_buffer: 524 +index_buffer: 528 +index_buffer: 529 +index_buffer: 524 +index_buffer: 529 +index_buffer: 526 +index_buffer: 523 +index_buffer: 530 +index_buffer: 528 +index_buffer: 523 +index_buffer: 528 +index_buffer: 524 +index_buffer: 45 +index_buffer: 531 +index_buffer: 2 +index_buffer: 45 +index_buffer: 2 +index_buffer: 1 +index_buffer: 532 +index_buffer: 531 +index_buffer: 45 +index_buffer: 532 +index_buffer: 45 +index_buffer: 47 +index_buffer: 48 +index_buffer: 533 +index_buffer: 532 +index_buffer: 48 +index_buffer: 532 +index_buffer: 47 +index_buffer: 534 +index_buffer: 53 +index_buffer: 52 +index_buffer: 534 +index_buffer: 52 +index_buffer: 535 +index_buffer: 535 +index_buffer: 52 +index_buffer: 55 +index_buffer: 535 +index_buffer: 55 +index_buffer: 536 +index_buffer: 536 +index_buffer: 55 +index_buffer: 57 +index_buffer: 536 +index_buffer: 57 +index_buffer: 537 +index_buffer: 537 +index_buffer: 57 +index_buffer: 59 +index_buffer: 537 +index_buffer: 59 +index_buffer: 538 +index_buffer: 538 +index_buffer: 59 +index_buffer: 61 +index_buffer: 538 +index_buffer: 61 +index_buffer: 539 +index_buffer: 61 +index_buffer: 63 +index_buffer: 540 +index_buffer: 61 +index_buffer: 540 +index_buffer: 539 +index_buffer: 63 +index_buffer: 65 +index_buffer: 541 +index_buffer: 63 +index_buffer: 541 +index_buffer: 540 +index_buffer: 65 +index_buffer: 67 +index_buffer: 124 +index_buffer: 65 +index_buffer: 124 +index_buffer: 541 +index_buffer: 542 +index_buffer: 543 +index_buffer: 544 +index_buffer: 542 +index_buffer: 544 +index_buffer: 397 +index_buffer: 545 +index_buffer: 543 +index_buffer: 542 
+index_buffer: 545 +index_buffer: 542 +index_buffer: 546 +index_buffer: 547 +index_buffer: 545 +index_buffer: 546 +index_buffer: 547 +index_buffer: 546 +index_buffer: 548 +index_buffer: 549 +index_buffer: 547 +index_buffer: 548 +index_buffer: 549 +index_buffer: 548 +index_buffer: 550 +index_buffer: 551 +index_buffer: 552 +index_buffer: 553 +index_buffer: 551 +index_buffer: 553 +index_buffer: 554 +index_buffer: 555 +index_buffer: 556 +index_buffer: 552 +index_buffer: 555 +index_buffer: 552 +index_buffer: 551 +index_buffer: 557 +index_buffer: 558 +index_buffer: 559 +index_buffer: 557 +index_buffer: 559 +index_buffer: 560 +index_buffer: 561 +index_buffer: 557 +index_buffer: 560 +index_buffer: 561 +index_buffer: 560 +index_buffer: 562 +index_buffer: 563 +index_buffer: 564 +index_buffer: 561 +index_buffer: 563 +index_buffer: 561 +index_buffer: 562 +index_buffer: 565 +index_buffer: 566 +index_buffer: 564 +index_buffer: 565 +index_buffer: 564 +index_buffer: 563 +index_buffer: 567 +index_buffer: 566 +index_buffer: 565 +index_buffer: 567 +index_buffer: 565 +index_buffer: 568 +index_buffer: 569 +index_buffer: 567 +index_buffer: 568 +index_buffer: 569 +index_buffer: 568 +index_buffer: 570 +index_buffer: 571 +index_buffer: 569 +index_buffer: 570 +index_buffer: 571 +index_buffer: 570 +index_buffer: 572 +index_buffer: 66 +index_buffer: 571 +index_buffer: 572 +index_buffer: 66 +index_buffer: 572 +index_buffer: 67 +index_buffer: 573 +index_buffer: 42 +index_buffer: 0 +index_buffer: 573 +index_buffer: 0 +index_buffer: 574 +index_buffer: 575 +index_buffer: 573 +index_buffer: 574 +index_buffer: 575 +index_buffer: 574 +index_buffer: 40 +index_buffer: 576 +index_buffer: 577 +index_buffer: 37 +index_buffer: 576 +index_buffer: 37 +index_buffer: 388 +index_buffer: 389 +index_buffer: 578 +index_buffer: 576 +index_buffer: 389 +index_buffer: 576 +index_buffer: 388 +index_buffer: 579 +index_buffer: 578 +index_buffer: 389 +index_buffer: 579 +index_buffer: 389 +index_buffer: 390 +index_buffer: 390 +index_buffer: 391 +index_buffer: 580 +index_buffer: 390 +index_buffer: 580 +index_buffer: 579 +index_buffer: 392 +index_buffer: 581 +index_buffer: 580 +index_buffer: 392 +index_buffer: 580 +index_buffer: 391 +index_buffer: 393 +index_buffer: 582 +index_buffer: 581 +index_buffer: 393 +index_buffer: 581 +index_buffer: 392 +index_buffer: 583 +index_buffer: 582 +index_buffer: 393 +index_buffer: 583 +index_buffer: 393 +index_buffer: 394 +index_buffer: 584 +index_buffer: 583 +index_buffer: 394 +index_buffer: 584 +index_buffer: 394 +index_buffer: 395 +index_buffer: 585 +index_buffer: 584 +index_buffer: 395 +index_buffer: 585 +index_buffer: 395 +index_buffer: 396 +index_buffer: 544 +index_buffer: 585 +index_buffer: 396 +index_buffer: 544 +index_buffer: 396 +index_buffer: 397 +index_buffer: 586 +index_buffer: 587 +index_buffer: 588 +index_buffer: 586 +index_buffer: 588 +index_buffer: 589 +index_buffer: 73 +index_buffer: 590 +index_buffer: 587 +index_buffer: 73 +index_buffer: 587 +index_buffer: 586 +index_buffer: 87 +index_buffer: 86 +index_buffer: 548 +index_buffer: 87 +index_buffer: 548 +index_buffer: 546 +index_buffer: 86 +index_buffer: 89 +index_buffer: 550 +index_buffer: 86 +index_buffer: 550 +index_buffer: 548 +index_buffer: 95 +index_buffer: 551 +index_buffer: 554 +index_buffer: 95 +index_buffer: 554 +index_buffer: 96 +index_buffer: 98 +index_buffer: 555 +index_buffer: 551 +index_buffer: 98 +index_buffer: 551 +index_buffer: 95 +index_buffer: 560 +index_buffer: 559 +index_buffer: 105 +index_buffer: 560 +index_buffer: 105 
+index_buffer: 104
+index_buffer: 562
+index_buffer: 560
+index_buffer: 104
+index_buffer: 562
+index_buffer: 104
+index_buffer: 106
+index_buffer: 563
+index_buffer: 562
[... several thousand additional "+index_buffer: N" lines of this mesh data file, one value per line, elided here; the hunk continues the index listing uninterrupted and this excerpt ends mid-entry ...]
1192 +index_buffer: 1190 +index_buffer: 1191 +index_buffer: 1193 +index_buffer: 1194 +index_buffer: 1191 +index_buffer: 1194 +index_buffer: 1192 +index_buffer: 1193 +index_buffer: 1195 +index_buffer: 1196 +index_buffer: 1193 +index_buffer: 1196 +index_buffer: 1194 +index_buffer: 1197 +index_buffer: 1198 +index_buffer: 1199 +index_buffer: 1197 +index_buffer: 1199 +index_buffer: 1200 +index_buffer: 1199 +index_buffer: 1201 +index_buffer: 1202 +index_buffer: 1199 +index_buffer: 1202 +index_buffer: 1200 +index_buffer: 1203 +index_buffer: 1204 +index_buffer: 1205 +index_buffer: 1203 +index_buffer: 1205 +index_buffer: 1206 +index_buffer: 1205 +index_buffer: 1204 +index_buffer: 1207 +index_buffer: 1205 +index_buffer: 1207 +index_buffer: 1208 +index_buffer: 1207 +index_buffer: 1209 +index_buffer: 1210 +index_buffer: 1207 +index_buffer: 1210 +index_buffer: 1208 +index_buffer: 1210 +index_buffer: 1209 +index_buffer: 1211 +index_buffer: 1210 +index_buffer: 1211 +index_buffer: 1212 +index_buffer: 1211 +index_buffer: 1213 +index_buffer: 1214 +index_buffer: 1211 +index_buffer: 1214 +index_buffer: 1212 +index_buffer: 1214 +index_buffer: 1213 +index_buffer: 1215 +index_buffer: 1214 +index_buffer: 1215 +index_buffer: 1216 +index_buffer: 1215 +index_buffer: 1217 +index_buffer: 1218 +index_buffer: 1215 +index_buffer: 1218 +index_buffer: 1216 +index_buffer: 1218 +index_buffer: 1217 +index_buffer: 1219 +index_buffer: 1218 +index_buffer: 1219 +index_buffer: 1220 +index_buffer: 1219 +index_buffer: 1221 +index_buffer: 1222 +index_buffer: 1219 +index_buffer: 1222 +index_buffer: 1220 +index_buffer: 1222 +index_buffer: 1221 +index_buffer: 1223 +index_buffer: 1222 +index_buffer: 1223 +index_buffer: 1224 +index_buffer: 1223 +index_buffer: 1225 +index_buffer: 1226 +index_buffer: 1223 +index_buffer: 1226 +index_buffer: 1224 +index_buffer: 1225 +index_buffer: 1227 +index_buffer: 1228 +index_buffer: 1225 +index_buffer: 1228 +index_buffer: 1226 +index_buffer: 1228 +index_buffer: 1227 +index_buffer: 1229 +index_buffer: 1228 +index_buffer: 1229 +index_buffer: 1230 +index_buffer: 1229 +index_buffer: 1231 +index_buffer: 1232 +index_buffer: 1229 +index_buffer: 1232 +index_buffer: 1230 +index_buffer: 1232 +index_buffer: 1231 +index_buffer: 1233 +index_buffer: 1232 +index_buffer: 1233 +index_buffer: 1234 +index_buffer: 1234 +index_buffer: 1233 +index_buffer: 1235 +index_buffer: 1234 +index_buffer: 1235 +index_buffer: 1236 +index_buffer: 1236 +index_buffer: 1235 +index_buffer: 1237 +index_buffer: 1236 +index_buffer: 1237 +index_buffer: 1238 +index_buffer: 1238 +index_buffer: 1237 +index_buffer: 1239 +index_buffer: 1238 +index_buffer: 1239 +index_buffer: 1240 +index_buffer: 1241 +index_buffer: 1242 +index_buffer: 1243 +index_buffer: 1241 +index_buffer: 1243 +index_buffer: 1244 +index_buffer: 1243 +index_buffer: 1242 +index_buffer: 1245 +index_buffer: 1243 +index_buffer: 1245 +index_buffer: 1246 +index_buffer: 1245 +index_buffer: 1247 +index_buffer: 1248 +index_buffer: 1245 +index_buffer: 1248 +index_buffer: 1246 +index_buffer: 1247 +index_buffer: 1249 +index_buffer: 1250 +index_buffer: 1247 +index_buffer: 1250 +index_buffer: 1248 +index_buffer: 918 +index_buffer: 917 +index_buffer: 1007 +index_buffer: 918 +index_buffer: 1007 +index_buffer: 1251 +index_buffer: 1252 +index_buffer: 1251 +index_buffer: 1007 +index_buffer: 1252 +index_buffer: 1007 +index_buffer: 1006 +index_buffer: 1252 +index_buffer: 1253 +index_buffer: 1254 +index_buffer: 1252 +index_buffer: 1254 +index_buffer: 1251 +index_buffer: 920 +index_buffer: 918 +index_buffer: 
1251 +index_buffer: 920 +index_buffer: 1251 +index_buffer: 1254 +index_buffer: 75 +index_buffer: 385 +index_buffer: 1255 +index_buffer: 75 +index_buffer: 1255 +index_buffer: 953 +index_buffer: 385 +index_buffer: 386 +index_buffer: 1256 +index_buffer: 385 +index_buffer: 1256 +index_buffer: 1255 +index_buffer: 916 +index_buffer: 1257 +index_buffer: 1008 +index_buffer: 916 +index_buffer: 1008 +index_buffer: 917 +index_buffer: 1257 +index_buffer: 1258 +index_buffer: 1009 +index_buffer: 1257 +index_buffer: 1009 +index_buffer: 1008 +index_buffer: 1258 +index_buffer: 1259 +index_buffer: 1014 +index_buffer: 1258 +index_buffer: 1014 +index_buffer: 1009 +index_buffer: 1014 +index_buffer: 1259 +index_buffer: 1260 +index_buffer: 1014 +index_buffer: 1260 +index_buffer: 1016 +index_buffer: 1016 +index_buffer: 1260 +index_buffer: 1261 +index_buffer: 1016 +index_buffer: 1261 +index_buffer: 1020 +index_buffer: 1020 +index_buffer: 1261 +index_buffer: 1262 +index_buffer: 1020 +index_buffer: 1262 +index_buffer: 1021 +index_buffer: 1021 +index_buffer: 1262 +index_buffer: 1263 +index_buffer: 1021 +index_buffer: 1263 +index_buffer: 1026 +index_buffer: 1026 +index_buffer: 1263 +index_buffer: 1264 +index_buffer: 1026 +index_buffer: 1264 +index_buffer: 1027 +index_buffer: 1027 +index_buffer: 1264 +index_buffer: 1265 +index_buffer: 1027 +index_buffer: 1265 +index_buffer: 1045 +index_buffer: 1047 +index_buffer: 1045 +index_buffer: 1265 +index_buffer: 1047 +index_buffer: 1265 +index_buffer: 1266 +index_buffer: 1083 +index_buffer: 1267 +index_buffer: 1268 +index_buffer: 1083 +index_buffer: 1268 +index_buffer: 1084 +index_buffer: 1085 +index_buffer: 1084 +index_buffer: 1268 +index_buffer: 1085 +index_buffer: 1268 +index_buffer: 1269 +index_buffer: 1270 +index_buffer: 1271 +index_buffer: 1272 +index_buffer: 1273 +index_buffer: 1270 +index_buffer: 1272 +index_buffer: 1273 +index_buffer: 1272 +index_buffer: 1274 +index_buffer: 1275 +index_buffer: 1276 +index_buffer: 1277 +index_buffer: 1278 +index_buffer: 1279 +index_buffer: 1280 +index_buffer: 1278 +index_buffer: 1280 +index_buffer: 1281 +index_buffer: 1282 +index_buffer: 1278 +index_buffer: 1281 +index_buffer: 1282 +index_buffer: 1281 +index_buffer: 1283 +index_buffer: 1284 +index_buffer: 1285 +index_buffer: 1279 +index_buffer: 1284 +index_buffer: 1279 +index_buffer: 1278 +index_buffer: 1280 +index_buffer: 1279 +index_buffer: 1286 +index_buffer: 1280 +index_buffer: 1286 +index_buffer: 1287 +index_buffer: 1285 +index_buffer: 1288 +index_buffer: 1286 +index_buffer: 1285 +index_buffer: 1286 +index_buffer: 1279 +index_buffer: 1289 +index_buffer: 1290 +index_buffer: 1291 +index_buffer: 1289 +index_buffer: 1291 +index_buffer: 1292 +index_buffer: 1293 +index_buffer: 1294 +index_buffer: 1292 +index_buffer: 1293 +index_buffer: 1292 +index_buffer: 1291 +index_buffer: 1295 +index_buffer: 1296 +index_buffer: 1292 +index_buffer: 1295 +index_buffer: 1292 +index_buffer: 1294 +index_buffer: 1289 +index_buffer: 1292 +index_buffer: 1296 +index_buffer: 1289 +index_buffer: 1296 +index_buffer: 1297 +index_buffer: 1298 +index_buffer: 1299 +index_buffer: 1300 +index_buffer: 1298 +index_buffer: 1300 +index_buffer: 1301 +index_buffer: 1290 +index_buffer: 1289 +index_buffer: 1301 +index_buffer: 1290 +index_buffer: 1301 +index_buffer: 1300 +index_buffer: 1297 +index_buffer: 1302 +index_buffer: 1301 +index_buffer: 1297 +index_buffer: 1301 +index_buffer: 1289 +index_buffer: 1302 +index_buffer: 1303 +index_buffer: 1298 +index_buffer: 1302 +index_buffer: 1298 +index_buffer: 1301 +index_buffer: 1304 
+index_buffer: 1305 +index_buffer: 1306 +index_buffer: 1304 +index_buffer: 1306 +index_buffer: 1307 +index_buffer: 1299 +index_buffer: 1298 +index_buffer: 1307 +index_buffer: 1299 +index_buffer: 1307 +index_buffer: 1306 +index_buffer: 1303 +index_buffer: 1308 +index_buffer: 1307 +index_buffer: 1303 +index_buffer: 1307 +index_buffer: 1298 +index_buffer: 1308 +index_buffer: 1309 +index_buffer: 1304 +index_buffer: 1308 +index_buffer: 1304 +index_buffer: 1307 +index_buffer: 1310 +index_buffer: 1311 +index_buffer: 1312 +index_buffer: 1310 +index_buffer: 1312 +index_buffer: 1313 +index_buffer: 1311 +index_buffer: 1305 +index_buffer: 1304 +index_buffer: 1311 +index_buffer: 1304 +index_buffer: 1312 +index_buffer: 1309 +index_buffer: 1314 +index_buffer: 1312 +index_buffer: 1309 +index_buffer: 1312 +index_buffer: 1304 +index_buffer: 1315 +index_buffer: 1313 +index_buffer: 1312 +index_buffer: 1315 +index_buffer: 1312 +index_buffer: 1314 +index_buffer: 1316 +index_buffer: 1003 +index_buffer: 1317 +index_buffer: 1316 +index_buffer: 1317 +index_buffer: 1318 +index_buffer: 1317 +index_buffer: 1310 +index_buffer: 1313 +index_buffer: 1317 +index_buffer: 1313 +index_buffer: 1318 +index_buffer: 1315 +index_buffer: 1319 +index_buffer: 1318 +index_buffer: 1315 +index_buffer: 1318 +index_buffer: 1313 +index_buffer: 1318 +index_buffer: 1319 +index_buffer: 1320 +index_buffer: 1318 +index_buffer: 1320 +index_buffer: 1316 +index_buffer: 997 +index_buffer: 999 +index_buffer: 1321 +index_buffer: 997 +index_buffer: 1321 +index_buffer: 1322 +index_buffer: 1003 +index_buffer: 1316 +index_buffer: 1321 +index_buffer: 1003 +index_buffer: 1321 +index_buffer: 999 +index_buffer: 1323 +index_buffer: 1321 +index_buffer: 1316 +index_buffer: 1323 +index_buffer: 1316 +index_buffer: 1320 +index_buffer: 1323 +index_buffer: 1324 +index_buffer: 1322 +index_buffer: 1323 +index_buffer: 1322 +index_buffer: 1321 +index_buffer: 993 +index_buffer: 1325 +index_buffer: 1326 +index_buffer: 993 +index_buffer: 1326 +index_buffer: 991 +index_buffer: 1325 +index_buffer: 993 +index_buffer: 997 +index_buffer: 1325 +index_buffer: 997 +index_buffer: 1322 +index_buffer: 1322 +index_buffer: 1324 +index_buffer: 1327 +index_buffer: 1322 +index_buffer: 1327 +index_buffer: 1325 +index_buffer: 1328 +index_buffer: 1326 +index_buffer: 1325 +index_buffer: 1328 +index_buffer: 1325 +index_buffer: 1327 +index_buffer: 1329 +index_buffer: 985 +index_buffer: 987 +index_buffer: 1329 +index_buffer: 987 +index_buffer: 1330 +index_buffer: 987 +index_buffer: 991 +index_buffer: 1326 +index_buffer: 987 +index_buffer: 1326 +index_buffer: 1330 +index_buffer: 1328 +index_buffer: 1331 +index_buffer: 1330 +index_buffer: 1328 +index_buffer: 1330 +index_buffer: 1326 +index_buffer: 1330 +index_buffer: 1331 +index_buffer: 1332 +index_buffer: 1330 +index_buffer: 1332 +index_buffer: 1329 +index_buffer: 977 +index_buffer: 980 +index_buffer: 1333 +index_buffer: 977 +index_buffer: 1333 +index_buffer: 1334 +index_buffer: 980 +index_buffer: 985 +index_buffer: 1329 +index_buffer: 980 +index_buffer: 1329 +index_buffer: 1333 +index_buffer: 1329 +index_buffer: 1332 +index_buffer: 1335 +index_buffer: 1329 +index_buffer: 1335 +index_buffer: 1333 +index_buffer: 1334 +index_buffer: 1333 +index_buffer: 1335 +index_buffer: 1334 +index_buffer: 1335 +index_buffer: 1336 +index_buffer: 1337 +index_buffer: 1338 +index_buffer: 971 +index_buffer: 1337 +index_buffer: 971 +index_buffer: 970 +index_buffer: 1339 +index_buffer: 1337 +index_buffer: 970 +index_buffer: 1339 +index_buffer: 970 +index_buffer: 976 
+index_buffer: 1339 +index_buffer: 1340 +index_buffer: 1341 +index_buffer: 1339 +index_buffer: 1341 +index_buffer: 1337 +index_buffer: 1341 +index_buffer: 1342 +index_buffer: 1338 +index_buffer: 1341 +index_buffer: 1338 +index_buffer: 1337 +index_buffer: 960 +index_buffer: 1343 +index_buffer: 1344 +index_buffer: 960 +index_buffer: 1344 +index_buffer: 961 +index_buffer: 967 +index_buffer: 1345 +index_buffer: 1343 +index_buffer: 967 +index_buffer: 1343 +index_buffer: 960 +index_buffer: 1346 +index_buffer: 1347 +index_buffer: 1343 +index_buffer: 1346 +index_buffer: 1343 +index_buffer: 1345 +index_buffer: 1343 +index_buffer: 1347 +index_buffer: 1348 +index_buffer: 1343 +index_buffer: 1348 +index_buffer: 1344 +index_buffer: 1349 +index_buffer: 1350 +index_buffer: 480 +index_buffer: 1349 +index_buffer: 480 +index_buffer: 483 +index_buffer: 484 +index_buffer: 480 +index_buffer: 1350 +index_buffer: 484 +index_buffer: 1350 +index_buffer: 1351 +index_buffer: 488 +index_buffer: 1352 +index_buffer: 1353 +index_buffer: 488 +index_buffer: 1353 +index_buffer: 489 +index_buffer: 1354 +index_buffer: 1355 +index_buffer: 1356 +index_buffer: 1354 +index_buffer: 1356 +index_buffer: 1357 +index_buffer: 1358 +index_buffer: 1359 +index_buffer: 1360 +index_buffer: 1358 +index_buffer: 1360 +index_buffer: 1361 +index_buffer: 1359 +index_buffer: 1362 +index_buffer: 1363 +index_buffer: 1359 +index_buffer: 1363 +index_buffer: 1360 +index_buffer: 1362 +index_buffer: 1364 +index_buffer: 1365 +index_buffer: 1362 +index_buffer: 1365 +index_buffer: 1363 +index_buffer: 1364 +index_buffer: 1366 +index_buffer: 1367 +index_buffer: 1364 +index_buffer: 1367 +index_buffer: 1365 +index_buffer: 1366 +index_buffer: 1368 +index_buffer: 1369 +index_buffer: 1366 +index_buffer: 1369 +index_buffer: 1367 +index_buffer: 1368 +index_buffer: 1370 +index_buffer: 1371 +index_buffer: 1368 +index_buffer: 1371 +index_buffer: 1369 +index_buffer: 1370 +index_buffer: 1372 +index_buffer: 1373 +index_buffer: 1370 +index_buffer: 1373 +index_buffer: 1371 +index_buffer: 1373 +index_buffer: 1372 +index_buffer: 1374 +index_buffer: 1373 +index_buffer: 1374 +index_buffer: 1375 +index_buffer: 1374 +index_buffer: 1376 +index_buffer: 1377 +index_buffer: 1374 +index_buffer: 1377 +index_buffer: 1375 +index_buffer: 1377 +index_buffer: 1376 +index_buffer: 1378 +index_buffer: 1377 +index_buffer: 1378 +index_buffer: 1379 +index_buffer: 1380 +index_buffer: 1381 +index_buffer: 1382 +index_buffer: 1380 +index_buffer: 1382 +index_buffer: 1383 +index_buffer: 1382 +index_buffer: 1384 +index_buffer: 1385 +index_buffer: 1382 +index_buffer: 1385 +index_buffer: 1383 +index_buffer: 1386 +index_buffer: 1387 +index_buffer: 1388 +index_buffer: 1386 +index_buffer: 1388 +index_buffer: 1389 +index_buffer: 1390 +index_buffer: 1386 +index_buffer: 1389 +index_buffer: 1390 +index_buffer: 1389 +index_buffer: 1391 +index_buffer: 1392 +index_buffer: 1393 +index_buffer: 1386 +index_buffer: 1392 +index_buffer: 1386 +index_buffer: 1390 +index_buffer: 1393 +index_buffer: 1394 +index_buffer: 1387 +index_buffer: 1393 +index_buffer: 1387 +index_buffer: 1386 +index_buffer: 6 +index_buffer: 1395 +index_buffer: 923 +index_buffer: 6 +index_buffer: 923 +index_buffer: 7 +index_buffer: 923 +index_buffer: 1395 +index_buffer: 1396 +index_buffer: 923 +index_buffer: 1396 +index_buffer: 926 +index_buffer: 1396 +index_buffer: 1397 +index_buffer: 927 +index_buffer: 1396 +index_buffer: 927 +index_buffer: 926 +index_buffer: 929 +index_buffer: 932 +index_buffer: 1398 +index_buffer: 929 +index_buffer: 1398 
+index_buffer: 1399 +index_buffer: 933 +index_buffer: 929 +index_buffer: 1399 +index_buffer: 933 +index_buffer: 1399 +index_buffer: 1400 +index_buffer: 935 +index_buffer: 933 +index_buffer: 1400 +index_buffer: 935 +index_buffer: 1400 +index_buffer: 1401 +index_buffer: 938 +index_buffer: 935 +index_buffer: 1401 +index_buffer: 938 +index_buffer: 1401 +index_buffer: 1402 +index_buffer: 940 +index_buffer: 938 +index_buffer: 1402 +index_buffer: 940 +index_buffer: 1402 +index_buffer: 1403 +index_buffer: 1404 +index_buffer: 942 +index_buffer: 940 +index_buffer: 1404 +index_buffer: 940 +index_buffer: 1403 +index_buffer: 1405 +index_buffer: 944 +index_buffer: 942 +index_buffer: 1405 +index_buffer: 942 +index_buffer: 1404 +index_buffer: 1001 +index_buffer: 946 +index_buffer: 944 +index_buffer: 1001 +index_buffer: 944 +index_buffer: 1405 +index_buffer: 1406 +index_buffer: 1407 +index_buffer: 1408 +index_buffer: 1406 +index_buffer: 1408 +index_buffer: 1266 +index_buffer: 1408 +index_buffer: 1407 +index_buffer: 1409 +index_buffer: 1408 +index_buffer: 1409 +index_buffer: 1410 +index_buffer: 1410 +index_buffer: 1409 +index_buffer: 1411 +index_buffer: 1410 +index_buffer: 1411 +index_buffer: 1412 +index_buffer: 1412 +index_buffer: 1411 +index_buffer: 1413 +index_buffer: 1412 +index_buffer: 1413 +index_buffer: 1414 +index_buffer: 1415 +index_buffer: 1416 +index_buffer: 1417 +index_buffer: 1415 +index_buffer: 1417 +index_buffer: 1418 +index_buffer: 1416 +index_buffer: 1419 +index_buffer: 1420 +index_buffer: 1416 +index_buffer: 1420 +index_buffer: 1417 +index_buffer: 1421 +index_buffer: 1422 +index_buffer: 1423 +index_buffer: 1421 +index_buffer: 1423 +index_buffer: 1424 +index_buffer: 1424 +index_buffer: 1423 +index_buffer: 1425 +index_buffer: 1424 +index_buffer: 1425 +index_buffer: 1426 +index_buffer: 1425 +index_buffer: 1427 +index_buffer: 1428 +index_buffer: 1425 +index_buffer: 1428 +index_buffer: 1426 +index_buffer: 1427 +index_buffer: 1429 +index_buffer: 1430 +index_buffer: 1427 +index_buffer: 1430 +index_buffer: 1428 +index_buffer: 1430 +index_buffer: 1429 +index_buffer: 1431 +index_buffer: 1430 +index_buffer: 1431 +index_buffer: 1432 +index_buffer: 1432 +index_buffer: 1431 +index_buffer: 1433 +index_buffer: 1432 +index_buffer: 1433 +index_buffer: 1434 +index_buffer: 1434 +index_buffer: 1433 +index_buffer: 1435 +index_buffer: 1434 +index_buffer: 1435 +index_buffer: 1436 +index_buffer: 1436 +index_buffer: 1435 +index_buffer: 945 +index_buffer: 1436 +index_buffer: 945 +index_buffer: 946 +index_buffer: 8 +index_buffer: 922 +index_buffer: 1437 +index_buffer: 8 +index_buffer: 1437 +index_buffer: 1438 +index_buffer: 1438 +index_buffer: 1437 +index_buffer: 1439 +index_buffer: 1438 +index_buffer: 1439 +index_buffer: 919 +index_buffer: 916 +index_buffer: 1440 +index_buffer: 1441 +index_buffer: 916 +index_buffer: 1441 +index_buffer: 1257 +index_buffer: 1441 +index_buffer: 1442 +index_buffer: 1258 +index_buffer: 1441 +index_buffer: 1258 +index_buffer: 1257 +index_buffer: 1258 +index_buffer: 1442 +index_buffer: 1443 +index_buffer: 1258 +index_buffer: 1443 +index_buffer: 1259 +index_buffer: 1444 +index_buffer: 1260 +index_buffer: 1259 +index_buffer: 1444 +index_buffer: 1259 +index_buffer: 1443 +index_buffer: 1444 +index_buffer: 1445 +index_buffer: 1261 +index_buffer: 1444 +index_buffer: 1261 +index_buffer: 1260 +index_buffer: 1445 +index_buffer: 1446 +index_buffer: 1262 +index_buffer: 1445 +index_buffer: 1262 +index_buffer: 1261 +index_buffer: 1262 +index_buffer: 1446 +index_buffer: 1447 +index_buffer: 1262 
+index_buffer: 1447 +index_buffer: 1263 +index_buffer: 1263 +index_buffer: 1447 +index_buffer: 1448 +index_buffer: 1263 +index_buffer: 1448 +index_buffer: 1264 +index_buffer: 1264 +index_buffer: 1448 +index_buffer: 1449 +index_buffer: 1264 +index_buffer: 1449 +index_buffer: 1265 +index_buffer: 1265 +index_buffer: 1449 +index_buffer: 1406 +index_buffer: 1265 +index_buffer: 1406 +index_buffer: 1266 +index_buffer: 1450 +index_buffer: 1451 +index_buffer: 1452 +index_buffer: 1450 +index_buffer: 1452 +index_buffer: 1453 +index_buffer: 1451 +index_buffer: 1454 +index_buffer: 952 +index_buffer: 1451 +index_buffer: 952 +index_buffer: 1452 +index_buffer: 1412 +index_buffer: 963 +index_buffer: 964 +index_buffer: 1412 +index_buffer: 964 +index_buffer: 1410 +index_buffer: 1414 +index_buffer: 965 +index_buffer: 963 +index_buffer: 1414 +index_buffer: 963 +index_buffer: 1412 +index_buffer: 1418 +index_buffer: 1417 +index_buffer: 973 +index_buffer: 1418 +index_buffer: 973 +index_buffer: 972 +index_buffer: 1417 +index_buffer: 1420 +index_buffer: 974 +index_buffer: 1417 +index_buffer: 974 +index_buffer: 973 +index_buffer: 981 +index_buffer: 1421 +index_buffer: 1424 +index_buffer: 981 +index_buffer: 1424 +index_buffer: 982 +index_buffer: 982 +index_buffer: 1424 +index_buffer: 1426 +index_buffer: 982 +index_buffer: 1426 +index_buffer: 983 +index_buffer: 983 +index_buffer: 1426 +index_buffer: 1428 +index_buffer: 983 +index_buffer: 1428 +index_buffer: 988 +index_buffer: 988 +index_buffer: 1428 +index_buffer: 1430 +index_buffer: 988 +index_buffer: 1430 +index_buffer: 989 +index_buffer: 989 +index_buffer: 1430 +index_buffer: 1432 +index_buffer: 989 +index_buffer: 1432 +index_buffer: 994 +index_buffer: 994 +index_buffer: 1432 +index_buffer: 1434 +index_buffer: 994 +index_buffer: 1434 +index_buffer: 995 +index_buffer: 995 +index_buffer: 1434 +index_buffer: 1436 +index_buffer: 995 +index_buffer: 1436 +index_buffer: 1000 +index_buffer: 1000 +index_buffer: 1436 +index_buffer: 946 +index_buffer: 1000 +index_buffer: 946 +index_buffer: 1001 +index_buffer: 1455 +index_buffer: 1456 +index_buffer: 1457 +index_buffer: 1455 +index_buffer: 1457 +index_buffer: 1458 +index_buffer: 1459 +index_buffer: 1046 +index_buffer: 1047 +index_buffer: 1459 +index_buffer: 1047 +index_buffer: 958 +index_buffer: 1459 +index_buffer: 1460 +index_buffer: 1048 +index_buffer: 1459 +index_buffer: 1048 +index_buffer: 1046 +index_buffer: 1068 +index_buffer: 1461 +index_buffer: 1462 +index_buffer: 1068 +index_buffer: 1462 +index_buffer: 1069 +index_buffer: 1069 +index_buffer: 1462 +index_buffer: 1463 +index_buffer: 1069 +index_buffer: 1463 +index_buffer: 1070 +index_buffer: 1464 +index_buffer: 1465 +index_buffer: 1466 +index_buffer: 1464 +index_buffer: 1466 +index_buffer: 1467 +index_buffer: 1158 +index_buffer: 1157 +index_buffer: 1465 +index_buffer: 1158 +index_buffer: 1465 +index_buffer: 1464 +index_buffer: 1157 +index_buffer: 914 +index_buffer: 947 +index_buffer: 1157 +index_buffer: 947 +index_buffer: 1465 +index_buffer: 1466 +index_buffer: 1465 +index_buffer: 947 +index_buffer: 1466 +index_buffer: 947 +index_buffer: 951 +index_buffer: 1468 +index_buffer: 1273 +index_buffer: 1274 +index_buffer: 1468 +index_buffer: 1274 +index_buffer: 1469 +index_buffer: 1470 +index_buffer: 1471 +index_buffer: 1468 +index_buffer: 1470 +index_buffer: 1468 +index_buffer: 1469 +index_buffer: 1355 +index_buffer: 1358 +index_buffer: 1361 +index_buffer: 1355 +index_buffer: 1361 +index_buffer: 1356 +index_buffer: 1472 +index_buffer: 1473 +index_buffer: 1474 +index_buffer: 
1472 +index_buffer: 1474 +index_buffer: 1475 +index_buffer: 1473 +index_buffer: 1280 +index_buffer: 1287 +index_buffer: 1473 +index_buffer: 1287 +index_buffer: 1474 +index_buffer: 1287 +index_buffer: 1391 +index_buffer: 1476 +index_buffer: 1287 +index_buffer: 1476 +index_buffer: 1474 +index_buffer: 1475 +index_buffer: 1474 +index_buffer: 1476 +index_buffer: 1475 +index_buffer: 1476 +index_buffer: 1477 +index_buffer: 919 +index_buffer: 920 +index_buffer: 1478 +index_buffer: 919 +index_buffer: 1478 +index_buffer: 1438 +index_buffer: 1438 +index_buffer: 1478 +index_buffer: 9 +index_buffer: 1438 +index_buffer: 9 +index_buffer: 8 +index_buffer: 1479 +index_buffer: 1480 +index_buffer: 1481 +index_buffer: 1479 +index_buffer: 1481 +index_buffer: 1062 +index_buffer: 1482 +index_buffer: 1480 +index_buffer: 1479 +index_buffer: 1482 +index_buffer: 1479 +index_buffer: 1072 +index_buffer: 1482 +index_buffer: 1483 +index_buffer: 1484 +index_buffer: 1482 +index_buffer: 1484 +index_buffer: 1480 +index_buffer: 1485 +index_buffer: 1061 +index_buffer: 1060 +index_buffer: 1485 +index_buffer: 1060 +index_buffer: 1486 +index_buffer: 1486 +index_buffer: 1065 +index_buffer: 1064 +index_buffer: 1486 +index_buffer: 1064 +index_buffer: 1485 +index_buffer: 1479 +index_buffer: 1485 +index_buffer: 1064 +index_buffer: 1479 +index_buffer: 1064 +index_buffer: 1072 +index_buffer: 1062 +index_buffer: 1061 +index_buffer: 1485 +index_buffer: 1062 +index_buffer: 1485 +index_buffer: 1479 +index_buffer: 1250 +index_buffer: 1249 +index_buffer: 1487 +index_buffer: 1250 +index_buffer: 1487 +index_buffer: 1488 +index_buffer: 1487 +index_buffer: 1167 +index_buffer: 1170 +index_buffer: 1487 +index_buffer: 1170 +index_buffer: 1488 +index_buffer: 1253 +index_buffer: 1489 +index_buffer: 1490 +index_buffer: 1253 +index_buffer: 1490 +index_buffer: 1254 +index_buffer: 1489 +index_buffer: 1036 +index_buffer: 1039 +index_buffer: 1489 +index_buffer: 1039 +index_buffer: 1490 +index_buffer: 1478 +index_buffer: 1490 +index_buffer: 1039 +index_buffer: 1478 +index_buffer: 1039 +index_buffer: 9 +index_buffer: 920 +index_buffer: 1254 +index_buffer: 1490 +index_buffer: 920 +index_buffer: 1490 +index_buffer: 1478 +index_buffer: 1379 +index_buffer: 1378 +index_buffer: 1491 +index_buffer: 1379 +index_buffer: 1491 +index_buffer: 1492 +index_buffer: 1491 +index_buffer: 1381 +index_buffer: 1380 +index_buffer: 1491 +index_buffer: 1380 +index_buffer: 1492 +index_buffer: 915 +index_buffer: 1493 +index_buffer: 1440 +index_buffer: 915 +index_buffer: 1440 +index_buffer: 916 +index_buffer: 919 +index_buffer: 1439 +index_buffer: 1493 +index_buffer: 919 +index_buffer: 1493 +index_buffer: 915 +index_buffer: 1169 +index_buffer: 1172 +index_buffer: 1442 +index_buffer: 1169 +index_buffer: 1442 +index_buffer: 1441 +index_buffer: 1440 +index_buffer: 1170 +index_buffer: 1169 +index_buffer: 1440 +index_buffer: 1169 +index_buffer: 1441 +index_buffer: 1174 +index_buffer: 1176 +index_buffer: 1444 +index_buffer: 1174 +index_buffer: 1444 +index_buffer: 1443 +index_buffer: 1442 +index_buffer: 1172 +index_buffer: 1174 +index_buffer: 1442 +index_buffer: 1174 +index_buffer: 1443 +index_buffer: 1178 +index_buffer: 1180 +index_buffer: 1446 +index_buffer: 1178 +index_buffer: 1446 +index_buffer: 1445 +index_buffer: 1178 +index_buffer: 1445 +index_buffer: 1444 +index_buffer: 1178 +index_buffer: 1444 +index_buffer: 1176 +index_buffer: 1447 +index_buffer: 1182 +index_buffer: 1184 +index_buffer: 1447 +index_buffer: 1184 +index_buffer: 1448 +index_buffer: 1446 +index_buffer: 1180 
+index_buffer: 1182 +index_buffer: 1446 +index_buffer: 1182 +index_buffer: 1447 +index_buffer: 1449 +index_buffer: 1186 +index_buffer: 1188 +index_buffer: 1449 +index_buffer: 1188 +index_buffer: 1406 +index_buffer: 1448 +index_buffer: 1184 +index_buffer: 1186 +index_buffer: 1448 +index_buffer: 1186 +index_buffer: 1449 +index_buffer: 1190 +index_buffer: 1192 +index_buffer: 1409 +index_buffer: 1190 +index_buffer: 1409 +index_buffer: 1407 +index_buffer: 1406 +index_buffer: 1188 +index_buffer: 1190 +index_buffer: 1406 +index_buffer: 1190 +index_buffer: 1407 +index_buffer: 1494 +index_buffer: 1495 +index_buffer: 1197 +index_buffer: 1494 +index_buffer: 1197 +index_buffer: 1415 +index_buffer: 1196 +index_buffer: 1495 +index_buffer: 1494 +index_buffer: 1196 +index_buffer: 1494 +index_buffer: 1413 +index_buffer: 1496 +index_buffer: 1206 +index_buffer: 1422 +index_buffer: 1496 +index_buffer: 1422 +index_buffer: 1497 +index_buffer: 1419 +index_buffer: 1202 +index_buffer: 1496 +index_buffer: 1419 +index_buffer: 1496 +index_buffer: 1497 +index_buffer: 1423 +index_buffer: 1205 +index_buffer: 1208 +index_buffer: 1423 +index_buffer: 1208 +index_buffer: 1425 +index_buffer: 1206 +index_buffer: 1205 +index_buffer: 1423 +index_buffer: 1206 +index_buffer: 1423 +index_buffer: 1422 +index_buffer: 1427 +index_buffer: 1210 +index_buffer: 1212 +index_buffer: 1427 +index_buffer: 1212 +index_buffer: 1429 +index_buffer: 1425 +index_buffer: 1208 +index_buffer: 1210 +index_buffer: 1425 +index_buffer: 1210 +index_buffer: 1427 +index_buffer: 1431 +index_buffer: 1214 +index_buffer: 1216 +index_buffer: 1431 +index_buffer: 1216 +index_buffer: 1433 +index_buffer: 1429 +index_buffer: 1212 +index_buffer: 1214 +index_buffer: 1429 +index_buffer: 1214 +index_buffer: 1431 +index_buffer: 1435 +index_buffer: 1218 +index_buffer: 1220 +index_buffer: 1435 +index_buffer: 1220 +index_buffer: 945 +index_buffer: 1433 +index_buffer: 1216 +index_buffer: 1218 +index_buffer: 1433 +index_buffer: 1218 +index_buffer: 1435 +index_buffer: 1222 +index_buffer: 1224 +index_buffer: 941 +index_buffer: 1222 +index_buffer: 941 +index_buffer: 943 +index_buffer: 1220 +index_buffer: 1222 +index_buffer: 943 +index_buffer: 1220 +index_buffer: 943 +index_buffer: 945 +index_buffer: 939 +index_buffer: 1226 +index_buffer: 1228 +index_buffer: 939 +index_buffer: 1228 +index_buffer: 937 +index_buffer: 1224 +index_buffer: 1226 +index_buffer: 939 +index_buffer: 1224 +index_buffer: 939 +index_buffer: 941 +index_buffer: 1230 +index_buffer: 1232 +index_buffer: 934 +index_buffer: 1230 +index_buffer: 934 +index_buffer: 936 +index_buffer: 1228 +index_buffer: 1230 +index_buffer: 936 +index_buffer: 1228 +index_buffer: 936 +index_buffer: 937 +index_buffer: 930 +index_buffer: 1234 +index_buffer: 1236 +index_buffer: 930 +index_buffer: 1236 +index_buffer: 931 +index_buffer: 934 +index_buffer: 1232 +index_buffer: 1234 +index_buffer: 934 +index_buffer: 1234 +index_buffer: 930 +index_buffer: 1498 +index_buffer: 1244 +index_buffer: 924 +index_buffer: 1498 +index_buffer: 924 +index_buffer: 925 +index_buffer: 928 +index_buffer: 1240 +index_buffer: 1498 +index_buffer: 928 +index_buffer: 1498 +index_buffer: 925 +index_buffer: 921 +index_buffer: 1243 +index_buffer: 1246 +index_buffer: 921 +index_buffer: 1246 +index_buffer: 922 +index_buffer: 1244 +index_buffer: 1243 +index_buffer: 921 +index_buffer: 1244 +index_buffer: 921 +index_buffer: 924 +index_buffer: 1248 +index_buffer: 1250 +index_buffer: 1439 +index_buffer: 1248 +index_buffer: 1439 +index_buffer: 1437 +index_buffer: 1437 
+index_buffer: 922 +index_buffer: 1246 +index_buffer: 1437 +index_buffer: 1246 +index_buffer: 1248 +index_buffer: 1493 +index_buffer: 1488 +index_buffer: 1170 +index_buffer: 1493 +index_buffer: 1170 +index_buffer: 1440 +index_buffer: 1439 +index_buffer: 1250 +index_buffer: 1488 +index_buffer: 1439 +index_buffer: 1488 +index_buffer: 1493 +index_buffer: 1499 +index_buffer: 928 +index_buffer: 927 +index_buffer: 1499 +index_buffer: 927 +index_buffer: 1500 +index_buffer: 931 +index_buffer: 1499 +index_buffer: 1500 +index_buffer: 931 +index_buffer: 1500 +index_buffer: 932 +index_buffer: 1501 +index_buffer: 1502 +index_buffer: 1106 +index_buffer: 1501 +index_buffer: 1106 +index_buffer: 1107 +index_buffer: 1099 +index_buffer: 1098 +index_buffer: 1502 +index_buffer: 1099 +index_buffer: 1502 +index_buffer: 1501 +index_buffer: 1239 +index_buffer: 1503 +index_buffer: 1498 +index_buffer: 1239 +index_buffer: 1498 +index_buffer: 1240 +index_buffer: 1503 +index_buffer: 1241 +index_buffer: 1244 +index_buffer: 1503 +index_buffer: 1244 +index_buffer: 1498 +index_buffer: 1504 +index_buffer: 1505 +index_buffer: 1506 +index_buffer: 1504 +index_buffer: 1506 +index_buffer: 1507 +index_buffer: 1505 +index_buffer: 1504 +index_buffer: 1508 +index_buffer: 1505 +index_buffer: 1508 +index_buffer: 1509 +index_buffer: 1510 +index_buffer: 1511 +index_buffer: 1505 +index_buffer: 1510 +index_buffer: 1505 +index_buffer: 1509 +index_buffer: 1505 +index_buffer: 1511 +index_buffer: 1295 +index_buffer: 1505 +index_buffer: 1295 +index_buffer: 1506 +index_buffer: 1512 +index_buffer: 1500 +index_buffer: 927 +index_buffer: 1512 +index_buffer: 927 +index_buffer: 1397 +index_buffer: 1398 +index_buffer: 932 +index_buffer: 1500 +index_buffer: 1398 +index_buffer: 1500 +index_buffer: 1512 +index_buffer: 1293 +index_buffer: 1291 +index_buffer: 1513 +index_buffer: 1293 +index_buffer: 1513 +index_buffer: 1514 +index_buffer: 1515 +index_buffer: 1513 +index_buffer: 1291 +index_buffer: 1515 +index_buffer: 1291 +index_buffer: 1290 +index_buffer: 1515 +index_buffer: 1398 +index_buffer: 1512 +index_buffer: 1515 +index_buffer: 1512 +index_buffer: 1513 +index_buffer: 1513 +index_buffer: 1512 +index_buffer: 1397 +index_buffer: 1513 +index_buffer: 1397 +index_buffer: 1514 +index_buffer: 1240 +index_buffer: 928 +index_buffer: 1499 +index_buffer: 1240 +index_buffer: 1499 +index_buffer: 1238 +index_buffer: 1236 +index_buffer: 1238 +index_buffer: 1499 +index_buffer: 1236 +index_buffer: 1499 +index_buffer: 931 +index_buffer: 894 +index_buffer: 899 +index_buffer: 895 +index_buffer: 894 +index_buffer: 895 +index_buffer: 890 +index_buffer: 897 +index_buffer: 900 +index_buffer: 905 +index_buffer: 897 +index_buffer: 905 +index_buffer: 901 +index_buffer: 903 +index_buffer: 906 +index_buffer: 911 +index_buffer: 903 +index_buffer: 911 +index_buffer: 907 +index_buffer: 909 +index_buffer: 912 +index_buffer: 949 +index_buffer: 909 +index_buffer: 949 +index_buffer: 913 +index_buffer: 952 +index_buffer: 948 +index_buffer: 950 +index_buffer: 952 +index_buffer: 950 +index_buffer: 1452 +index_buffer: 1063 +index_buffer: 1033 +index_buffer: 889 +index_buffer: 1063 +index_buffer: 889 +index_buffer: 892 +index_buffer: 1062 +index_buffer: 1481 +index_buffer: 1033 +index_buffer: 1062 +index_buffer: 1033 +index_buffer: 1063 +index_buffer: 1481 +index_buffer: 1034 +index_buffer: 1030 +index_buffer: 1481 +index_buffer: 1030 +index_buffer: 1033 +index_buffer: 1516 +index_buffer: 1517 +index_buffer: 1518 +index_buffer: 1516 +index_buffer: 1518 +index_buffer: 1519 +index_buffer: 
1518 +index_buffer: 1456 +index_buffer: 1455 +index_buffer: 1518 +index_buffer: 1455 +index_buffer: 1519 +index_buffer: 1041 +index_buffer: 1519 +index_buffer: 1455 +index_buffer: 1041 +index_buffer: 1455 +index_buffer: 1520 +index_buffer: 283 +index_buffer: 1154 +index_buffer: 1051 +index_buffer: 283 +index_buffer: 1051 +index_buffer: 177 +index_buffer: 1078 +index_buffer: 1521 +index_buffer: 1079 +index_buffer: 1078 +index_buffer: 1079 +index_buffer: 1074 +index_buffer: 1522 +index_buffer: 1385 +index_buffer: 1384 +index_buffer: 1522 +index_buffer: 1384 +index_buffer: 1523 +index_buffer: 1524 +index_buffer: 1525 +index_buffer: 1526 +index_buffer: 1524 +index_buffer: 1526 +index_buffer: 1527 +index_buffer: 1525 +index_buffer: 1528 +index_buffer: 1529 +index_buffer: 1525 +index_buffer: 1529 +index_buffer: 1526 +index_buffer: 1529 +index_buffer: 1530 +index_buffer: 1282 +index_buffer: 1529 +index_buffer: 1282 +index_buffer: 1526 +index_buffer: 1454 +index_buffer: 1531 +index_buffer: 1532 +index_buffer: 1454 +index_buffer: 1532 +index_buffer: 1533 +index_buffer: 1534 +index_buffer: 1531 +index_buffer: 1454 +index_buffer: 1534 +index_buffer: 1454 +index_buffer: 1451 +index_buffer: 1535 +index_buffer: 1283 +index_buffer: 1281 +index_buffer: 1535 +index_buffer: 1281 +index_buffer: 1536 +index_buffer: 1537 +index_buffer: 1538 +index_buffer: 1539 +index_buffer: 1537 +index_buffer: 1539 +index_buffer: 1535 +index_buffer: 1480 +index_buffer: 1484 +index_buffer: 1034 +index_buffer: 1480 +index_buffer: 1034 +index_buffer: 1481 +index_buffer: 1540 +index_buffer: 1541 +index_buffer: 1542 +index_buffer: 1540 +index_buffer: 1542 +index_buffer: 1543 +index_buffer: 1540 +index_buffer: 1467 +index_buffer: 1466 +index_buffer: 1540 +index_buffer: 1466 +index_buffer: 1541 +index_buffer: 1541 +index_buffer: 1466 +index_buffer: 951 +index_buffer: 1541 +index_buffer: 951 +index_buffer: 1533 +index_buffer: 1542 +index_buffer: 1541 +index_buffer: 1533 +index_buffer: 1542 +index_buffer: 1533 +index_buffer: 1532 +index_buffer: 1544 +index_buffer: 1534 +index_buffer: 1451 +index_buffer: 1544 +index_buffer: 1451 +index_buffer: 1450 +index_buffer: 1539 +index_buffer: 1545 +index_buffer: 1524 +index_buffer: 1539 +index_buffer: 1524 +index_buffer: 1527 +index_buffer: 1538 +index_buffer: 1546 +index_buffer: 1545 +index_buffer: 1538 +index_buffer: 1545 +index_buffer: 1539 +index_buffer: 957 +index_buffer: 1547 +index_buffer: 1459 +index_buffer: 957 +index_buffer: 1459 +index_buffer: 958 +index_buffer: 1547 +index_buffer: 1548 +index_buffer: 1460 +index_buffer: 1547 +index_buffer: 1460 +index_buffer: 1459 +index_buffer: 1549 +index_buffer: 1550 +index_buffer: 1551 +index_buffer: 1549 +index_buffer: 1551 +index_buffer: 1277 +index_buffer: 1391 +index_buffer: 1389 +index_buffer: 1552 +index_buffer: 1391 +index_buffer: 1552 +index_buffer: 1476 +index_buffer: 1389 +index_buffer: 1388 +index_buffer: 1553 +index_buffer: 1389 +index_buffer: 1553 +index_buffer: 1552 +index_buffer: 1553 +index_buffer: 1554 +index_buffer: 1555 +index_buffer: 1553 +index_buffer: 1555 +index_buffer: 1552 +index_buffer: 1476 +index_buffer: 1552 +index_buffer: 1555 +index_buffer: 1476 +index_buffer: 1555 +index_buffer: 1477 +index_buffer: 1555 +index_buffer: 1556 +index_buffer: 1396 +index_buffer: 1555 +index_buffer: 1396 +index_buffer: 1477 +index_buffer: 1555 +index_buffer: 1554 +index_buffer: 1557 +index_buffer: 1555 +index_buffer: 1557 +index_buffer: 1556 +index_buffer: 1556 +index_buffer: 1557 +index_buffer: 1293 +index_buffer: 1556 +index_buffer: 
1293 +index_buffer: 1514 +index_buffer: 1556 +index_buffer: 1514 +index_buffer: 1397 +index_buffer: 1556 +index_buffer: 1397 +index_buffer: 1396 +index_buffer: 1399 +index_buffer: 1398 +index_buffer: 1515 +index_buffer: 1399 +index_buffer: 1515 +index_buffer: 1558 +index_buffer: 1558 +index_buffer: 1515 +index_buffer: 1290 +index_buffer: 1558 +index_buffer: 1290 +index_buffer: 1300 +index_buffer: 1299 +index_buffer: 1559 +index_buffer: 1558 +index_buffer: 1299 +index_buffer: 1558 +index_buffer: 1300 +index_buffer: 1400 +index_buffer: 1399 +index_buffer: 1558 +index_buffer: 1400 +index_buffer: 1558 +index_buffer: 1559 +index_buffer: 1401 +index_buffer: 1400 +index_buffer: 1559 +index_buffer: 1401 +index_buffer: 1559 +index_buffer: 1560 +index_buffer: 1560 +index_buffer: 1559 +index_buffer: 1299 +index_buffer: 1560 +index_buffer: 1299 +index_buffer: 1306 +index_buffer: 1305 +index_buffer: 1561 +index_buffer: 1560 +index_buffer: 1305 +index_buffer: 1560 +index_buffer: 1306 +index_buffer: 1561 +index_buffer: 1402 +index_buffer: 1401 +index_buffer: 1561 +index_buffer: 1401 +index_buffer: 1560 +index_buffer: 1562 +index_buffer: 1403 +index_buffer: 1402 +index_buffer: 1562 +index_buffer: 1402 +index_buffer: 1561 +index_buffer: 1562 +index_buffer: 1561 +index_buffer: 1305 +index_buffer: 1562 +index_buffer: 1305 +index_buffer: 1311 +index_buffer: 1310 +index_buffer: 1563 +index_buffer: 1562 +index_buffer: 1310 +index_buffer: 1562 +index_buffer: 1311 +index_buffer: 1563 +index_buffer: 1404 +index_buffer: 1403 +index_buffer: 1563 +index_buffer: 1403 +index_buffer: 1562 +index_buffer: 1564 +index_buffer: 1405 +index_buffer: 1404 +index_buffer: 1564 +index_buffer: 1404 +index_buffer: 1563 +index_buffer: 1317 +index_buffer: 1564 +index_buffer: 1563 +index_buffer: 1317 +index_buffer: 1563 +index_buffer: 1310 +index_buffer: 1003 +index_buffer: 1002 +index_buffer: 1564 +index_buffer: 1003 +index_buffer: 1564 +index_buffer: 1317 +index_buffer: 1002 +index_buffer: 1001 +index_buffer: 1405 +index_buffer: 1002 +index_buffer: 1405 +index_buffer: 1564 +index_buffer: 1454 +index_buffer: 1533 +index_buffer: 951 +index_buffer: 1454 +index_buffer: 951 +index_buffer: 952 +index_buffer: 1565 +index_buffer: 1566 +index_buffer: 1472 +index_buffer: 1565 +index_buffer: 1472 +index_buffer: 5 +index_buffer: 9 +index_buffer: 1042 +index_buffer: 1565 +index_buffer: 9 +index_buffer: 1565 +index_buffer: 5 +index_buffer: 1042 +index_buffer: 1520 +index_buffer: 1567 +index_buffer: 1042 +index_buffer: 1567 +index_buffer: 1565 +index_buffer: 965 +index_buffer: 1568 +index_buffer: 1569 +index_buffer: 965 +index_buffer: 1569 +index_buffer: 966 +index_buffer: 1568 +index_buffer: 972 +index_buffer: 968 +index_buffer: 1568 +index_buffer: 968 +index_buffer: 1569 +index_buffer: 1569 +index_buffer: 968 +index_buffer: 971 +index_buffer: 1569 +index_buffer: 971 +index_buffer: 1570 +index_buffer: 1569 +index_buffer: 1570 +index_buffer: 967 +index_buffer: 1569 +index_buffer: 967 +index_buffer: 966 +index_buffer: 1150 +index_buffer: 1571 +index_buffer: 1572 +index_buffer: 1150 +index_buffer: 1572 +index_buffer: 1151 +index_buffer: 1571 +index_buffer: 1142 +index_buffer: 1141 +index_buffer: 1571 +index_buffer: 1141 +index_buffer: 1572 +index_buffer: 1195 +index_buffer: 1573 +index_buffer: 1495 +index_buffer: 1195 +index_buffer: 1495 +index_buffer: 1196 +index_buffer: 1573 +index_buffer: 1198 +index_buffer: 1197 +index_buffer: 1573 +index_buffer: 1197 +index_buffer: 1495 +index_buffer: 1574 +index_buffer: 1345 +index_buffer: 967 +index_buffer: 
1574 +index_buffer: 967 +index_buffer: 1570 +index_buffer: 1570 +index_buffer: 971 +index_buffer: 1338 +index_buffer: 1570 +index_buffer: 1338 +index_buffer: 1574 +index_buffer: 1342 +index_buffer: 1575 +index_buffer: 1574 +index_buffer: 1342 +index_buffer: 1574 +index_buffer: 1338 +index_buffer: 1575 +index_buffer: 1346 +index_buffer: 1345 +index_buffer: 1575 +index_buffer: 1345 +index_buffer: 1574 +index_buffer: 1576 +index_buffer: 1494 +index_buffer: 1415 +index_buffer: 1576 +index_buffer: 1415 +index_buffer: 1418 +index_buffer: 1414 +index_buffer: 1413 +index_buffer: 1494 +index_buffer: 1414 +index_buffer: 1494 +index_buffer: 1576 +index_buffer: 1414 +index_buffer: 1576 +index_buffer: 1568 +index_buffer: 1414 +index_buffer: 1568 +index_buffer: 965 +index_buffer: 1576 +index_buffer: 1418 +index_buffer: 972 +index_buffer: 1576 +index_buffer: 972 +index_buffer: 1568 +index_buffer: 1194 +index_buffer: 1196 +index_buffer: 1413 +index_buffer: 1194 +index_buffer: 1413 +index_buffer: 1411 +index_buffer: 1409 +index_buffer: 1192 +index_buffer: 1194 +index_buffer: 1409 +index_buffer: 1194 +index_buffer: 1411 +index_buffer: 713 +index_buffer: 1147 +index_buffer: 1146 +index_buffer: 713 +index_buffer: 1146 +index_buffer: 714 +index_buffer: 1577 +index_buffer: 1578 +index_buffer: 1350 +index_buffer: 1577 +index_buffer: 1350 +index_buffer: 1349 +index_buffer: 1344 +index_buffer: 1578 +index_buffer: 1577 +index_buffer: 1344 +index_buffer: 1577 +index_buffer: 961 +index_buffer: 1344 +index_buffer: 1348 +index_buffer: 1579 +index_buffer: 1344 +index_buffer: 1579 +index_buffer: 1578 +index_buffer: 1578 +index_buffer: 1579 +index_buffer: 1351 +index_buffer: 1578 +index_buffer: 1351 +index_buffer: 1350 +index_buffer: 720 +index_buffer: 1580 +index_buffer: 1581 +index_buffer: 720 +index_buffer: 1581 +index_buffer: 721 +index_buffer: 1349 +index_buffer: 483 +index_buffer: 721 +index_buffer: 1349 +index_buffer: 721 +index_buffer: 1581 +index_buffer: 962 +index_buffer: 1582 +index_buffer: 1583 +index_buffer: 962 +index_buffer: 1583 +index_buffer: 964 +index_buffer: 962 +index_buffer: 961 +index_buffer: 1577 +index_buffer: 962 +index_buffer: 1577 +index_buffer: 1582 +index_buffer: 1582 +index_buffer: 1577 +index_buffer: 1349 +index_buffer: 1582 +index_buffer: 1349 +index_buffer: 1581 +index_buffer: 1582 +index_buffer: 1581 +index_buffer: 1580 +index_buffer: 1582 +index_buffer: 1580 +index_buffer: 1583 +index_buffer: 1267 +index_buffer: 1584 +index_buffer: 1585 +index_buffer: 1267 +index_buffer: 1585 +index_buffer: 1268 +index_buffer: 1585 +index_buffer: 1584 +index_buffer: 1586 +index_buffer: 1585 +index_buffer: 1586 +index_buffer: 1587 +index_buffer: 1585 +index_buffer: 1587 +index_buffer: 1588 +index_buffer: 1585 +index_buffer: 1588 +index_buffer: 1589 +index_buffer: 1268 +index_buffer: 1585 +index_buffer: 1589 +index_buffer: 1268 +index_buffer: 1589 +index_buffer: 1269 +index_buffer: 1286 +index_buffer: 1390 +index_buffer: 1391 +index_buffer: 1286 +index_buffer: 1391 +index_buffer: 1287 +index_buffer: 1286 +index_buffer: 1288 +index_buffer: 1392 +index_buffer: 1286 +index_buffer: 1392 +index_buffer: 1390 +index_buffer: 1590 +index_buffer: 1591 +index_buffer: 1592 +index_buffer: 1590 +index_buffer: 1592 +index_buffer: 1593 +index_buffer: 1593 +index_buffer: 1592 +index_buffer: 1394 +index_buffer: 1593 +index_buffer: 1394 +index_buffer: 1594 +index_buffer: 1593 +index_buffer: 1594 +index_buffer: 1588 +index_buffer: 1593 +index_buffer: 1588 +index_buffer: 1595 +index_buffer: 1590 +index_buffer: 1593 
+index_buffer: 1595 +index_buffer: 1590 +index_buffer: 1595 +index_buffer: 1095 +index_buffer: 1075 +index_buffer: 1596 +index_buffer: 1597 +index_buffer: 1075 +index_buffer: 1597 +index_buffer: 1076 +index_buffer: 1596 +index_buffer: 1075 +index_buffer: 1080 +index_buffer: 1596 +index_buffer: 1080 +index_buffer: 1598 +index_buffer: 1599 +index_buffer: 1596 +index_buffer: 1598 +index_buffer: 1599 +index_buffer: 1598 +index_buffer: 1600 +index_buffer: 1551 +index_buffer: 1601 +index_buffer: 1277 +index_buffer: 1601 +index_buffer: 1551 +index_buffer: 1528 +index_buffer: 1601 +index_buffer: 1528 +index_buffer: 1525 +index_buffer: 1275 +index_buffer: 1601 +index_buffer: 1525 +index_buffer: 1275 +index_buffer: 1525 +index_buffer: 1524 +index_buffer: 1276 +index_buffer: 1275 +index_buffer: 1524 +index_buffer: 1276 +index_buffer: 1524 +index_buffer: 1545 +index_buffer: 1602 +index_buffer: 1276 +index_buffer: 1545 +index_buffer: 1602 +index_buffer: 1545 +index_buffer: 1546 +index_buffer: 1457 +index_buffer: 1603 +index_buffer: 1602 +index_buffer: 1457 +index_buffer: 1602 +index_buffer: 1546 +index_buffer: 1456 +index_buffer: 1471 +index_buffer: 1603 +index_buffer: 1456 +index_buffer: 1603 +index_buffer: 1457 +index_buffer: 1471 +index_buffer: 1456 +index_buffer: 1518 +index_buffer: 1471 +index_buffer: 1518 +index_buffer: 1468 +index_buffer: 1518 +index_buffer: 1517 +index_buffer: 1273 +index_buffer: 1518 +index_buffer: 1273 +index_buffer: 1468 +index_buffer: 1273 +index_buffer: 1517 +index_buffer: 1516 +index_buffer: 1273 +index_buffer: 1516 +index_buffer: 1270 +index_buffer: 1522 +index_buffer: 1271 +index_buffer: 1040 +index_buffer: 1522 +index_buffer: 1040 +index_buffer: 1037 +index_buffer: 1489 +index_buffer: 1383 +index_buffer: 1385 +index_buffer: 1489 +index_buffer: 1385 +index_buffer: 1036 +index_buffer: 1253 +index_buffer: 1380 +index_buffer: 1383 +index_buffer: 1253 +index_buffer: 1383 +index_buffer: 1489 +index_buffer: 1252 +index_buffer: 1492 +index_buffer: 1380 +index_buffer: 1252 +index_buffer: 1380 +index_buffer: 1253 +index_buffer: 1379 +index_buffer: 1492 +index_buffer: 1252 +index_buffer: 1379 +index_buffer: 1252 +index_buffer: 1006 +index_buffer: 1379 +index_buffer: 1006 +index_buffer: 1005 +index_buffer: 1379 +index_buffer: 1005 +index_buffer: 1377 +index_buffer: 1011 +index_buffer: 1375 +index_buffer: 1377 +index_buffer: 1011 +index_buffer: 1377 +index_buffer: 1005 +index_buffer: 1375 +index_buffer: 1011 +index_buffer: 1013 +index_buffer: 1375 +index_buffer: 1013 +index_buffer: 1373 +index_buffer: 1017 +index_buffer: 1371 +index_buffer: 1373 +index_buffer: 1017 +index_buffer: 1373 +index_buffer: 1013 +index_buffer: 1017 +index_buffer: 1019 +index_buffer: 1369 +index_buffer: 1017 +index_buffer: 1369 +index_buffer: 1371 +index_buffer: 1019 +index_buffer: 1023 +index_buffer: 1367 +index_buffer: 1019 +index_buffer: 1367 +index_buffer: 1369 +index_buffer: 1367 +index_buffer: 1023 +index_buffer: 1025 +index_buffer: 1367 +index_buffer: 1025 +index_buffer: 1365 +index_buffer: 1363 +index_buffer: 1365 +index_buffer: 1025 +index_buffer: 1363 +index_buffer: 1025 +index_buffer: 1029 +index_buffer: 1363 +index_buffer: 1029 +index_buffer: 1044 +index_buffer: 1363 +index_buffer: 1044 +index_buffer: 1360 +index_buffer: 1361 +index_buffer: 1360 +index_buffer: 1044 +index_buffer: 1361 +index_buffer: 1044 +index_buffer: 1048 +index_buffer: 1460 +index_buffer: 1356 +index_buffer: 1361 +index_buffer: 1460 +index_buffer: 1361 +index_buffer: 1048 +index_buffer: 1548 +index_buffer: 1357 +index_buffer: 
1356 +index_buffer: 1548 +index_buffer: 1356 +index_buffer: 1460 +index_buffer: 386 +index_buffer: 489 +index_buffer: 1353 +index_buffer: 386 +index_buffer: 1353 +index_buffer: 1256 +index_buffer: 1511 +index_buffer: 1296 +index_buffer: 1295 +index_buffer: 1296 +index_buffer: 1511 +index_buffer: 1510 +index_buffer: 1296 +index_buffer: 1510 +index_buffer: 1501 +index_buffer: 1105 +index_buffer: 1104 +index_buffer: 1303 +index_buffer: 1105 +index_buffer: 1303 +index_buffer: 1302 +index_buffer: 1104 +index_buffer: 1108 +index_buffer: 1308 +index_buffer: 1104 +index_buffer: 1308 +index_buffer: 1303 +index_buffer: 1309 +index_buffer: 1111 +index_buffer: 1112 +index_buffer: 1309 +index_buffer: 1112 +index_buffer: 1314 +index_buffer: 1115 +index_buffer: 1119 +index_buffer: 1319 +index_buffer: 1115 +index_buffer: 1319 +index_buffer: 1315 +index_buffer: 1320 +index_buffer: 1118 +index_buffer: 1120 +index_buffer: 1320 +index_buffer: 1120 +index_buffer: 1323 +index_buffer: 1123 +index_buffer: 1124 +index_buffer: 1327 +index_buffer: 1123 +index_buffer: 1327 +index_buffer: 1324 +index_buffer: 1328 +index_buffer: 1127 +index_buffer: 1128 +index_buffer: 1328 +index_buffer: 1128 +index_buffer: 1331 +index_buffer: 1332 +index_buffer: 1131 +index_buffer: 1133 +index_buffer: 1332 +index_buffer: 1133 +index_buffer: 1335 +index_buffer: 1604 +index_buffer: 1336 +index_buffer: 1132 +index_buffer: 1604 +index_buffer: 1132 +index_buffer: 1137 +index_buffer: 1142 +index_buffer: 1571 +index_buffer: 1347 +index_buffer: 1142 +index_buffer: 1347 +index_buffer: 1346 +index_buffer: 1351 +index_buffer: 1147 +index_buffer: 713 +index_buffer: 1351 +index_buffer: 713 +index_buffer: 484 +index_buffer: 1145 +index_buffer: 1143 +index_buffer: 1575 +index_buffer: 1145 +index_buffer: 1575 +index_buffer: 1342 +index_buffer: 1348 +index_buffer: 1150 +index_buffer: 1148 +index_buffer: 1348 +index_buffer: 1148 +index_buffer: 1579 +index_buffer: 1506 +index_buffer: 1295 +index_buffer: 1294 +index_buffer: 1506 +index_buffer: 1294 +index_buffer: 1605 +index_buffer: 1605 +index_buffer: 1294 +index_buffer: 1293 +index_buffer: 1605 +index_buffer: 1293 +index_buffer: 1557 +index_buffer: 1606 +index_buffer: 1605 +index_buffer: 1557 +index_buffer: 1606 +index_buffer: 1557 +index_buffer: 1554 +index_buffer: 1507 +index_buffer: 1506 +index_buffer: 1605 +index_buffer: 1507 +index_buffer: 1605 +index_buffer: 1606 +index_buffer: 1472 +index_buffer: 1475 +index_buffer: 6 +index_buffer: 1472 +index_buffer: 6 +index_buffer: 5 +index_buffer: 6 +index_buffer: 1475 +index_buffer: 1477 +index_buffer: 6 +index_buffer: 1477 +index_buffer: 1395 +index_buffer: 1607 +index_buffer: 1608 +index_buffer: 1609 +index_buffer: 1607 +index_buffer: 1609 +index_buffer: 1610 +index_buffer: 1608 +index_buffer: 1611 +index_buffer: 1100 +index_buffer: 1608 +index_buffer: 1100 +index_buffer: 1609 +index_buffer: 1612 +index_buffer: 1613 +index_buffer: 1614 +index_buffer: 1612 +index_buffer: 1614 +index_buffer: 1521 +index_buffer: 1528 +index_buffer: 1615 +index_buffer: 1616 +index_buffer: 1528 +index_buffer: 1616 +index_buffer: 1529 +index_buffer: 1080 +index_buffer: 1079 +index_buffer: 1616 +index_buffer: 1080 +index_buffer: 1616 +index_buffer: 1615 +index_buffer: 1085 +index_buffer: 1617 +index_buffer: 1618 +index_buffer: 1085 +index_buffer: 1618 +index_buffer: 1086 +index_buffer: 1618 +index_buffer: 1617 +index_buffer: 1288 +index_buffer: 1618 +index_buffer: 1288 +index_buffer: 1285 +index_buffer: 1619 +index_buffer: 1620 +index_buffer: 1621 +index_buffer: 1619 
+index_buffer: 1621
+index_buffer: 1101
+index_buffer: 1620
+index_buffer: 1619
+index_buffer: 1591
+index_buffer: 1620
+index_buffer: 1591
+index_buffer: 1590
+index_buffer: 1620
[... thousands of further index_buffer entries elided; the added data file continues in the same one-entry-per-line pattern, with indices running up through the 2160s ...]
+index_buffer: 2112
+index_buffer: 2168
+index_buffer: 2167
+index_buffer: 2168
+index_buffer: 2113
+index_buffer:
2120 +index_buffer: 2168 +index_buffer: 2120 +index_buffer: 2169 +index_buffer: 1949 +index_buffer: 2168 +index_buffer: 2169 +index_buffer: 1949 +index_buffer: 2169 +index_buffer: 1975 +index_buffer: 1975 +index_buffer: 2169 +index_buffer: 2170 +index_buffer: 1975 +index_buffer: 2170 +index_buffer: 1977 +index_buffer: 2169 +index_buffer: 2120 +index_buffer: 2121 +index_buffer: 2169 +index_buffer: 2121 +index_buffer: 2170 +index_buffer: 2170 +index_buffer: 2121 +index_buffer: 2128 +index_buffer: 2170 +index_buffer: 2128 +index_buffer: 2171 +index_buffer: 1997 +index_buffer: 1977 +index_buffer: 2170 +index_buffer: 1997 +index_buffer: 2170 +index_buffer: 2171 +index_buffer: 1924 +index_buffer: 1923 +index_buffer: 1997 +index_buffer: 1924 +index_buffer: 1997 +index_buffer: 2171 +index_buffer: 2171 +index_buffer: 2128 +index_buffer: 1927 +index_buffer: 2171 +index_buffer: 1927 +index_buffer: 1924 +index_buffer: 1919 +index_buffer: 2034 +index_buffer: 2061 +index_buffer: 1919 +index_buffer: 2061 +index_buffer: 2036 +index_buffer: 2041 +index_buffer: 2040 +index_buffer: 2064 +index_buffer: 2041 +index_buffer: 2064 +index_buffer: 2172 +index_buffer: 2045 +index_buffer: 2041 +index_buffer: 2172 +index_buffer: 2045 +index_buffer: 2172 +index_buffer: 2164 +index_buffer: 2034 +index_buffer: 2033 +index_buffer: 2173 +index_buffer: 2034 +index_buffer: 2173 +index_buffer: 2019 +index_buffer: 2035 +index_buffer: 2166 +index_buffer: 2173 +index_buffer: 2035 +index_buffer: 2173 +index_buffer: 2033 +index_buffer: 2062 +index_buffer: 2174 +index_buffer: 2029 +index_buffer: 2062 +index_buffer: 2029 +index_buffer: 2064 +index_buffer: 2061 +index_buffer: 2020 +index_buffer: 2174 +index_buffer: 2061 +index_buffer: 2174 +index_buffer: 2062 +index_buffer: 2175 +index_buffer: 2176 +index_buffer: 2139 +index_buffer: 2175 +index_buffer: 2139 +index_buffer: 2137 +index_buffer: 2177 +index_buffer: 2175 +index_buffer: 2137 +index_buffer: 2177 +index_buffer: 2137 +index_buffer: 2136 +index_buffer: 2178 +index_buffer: 2179 +index_buffer: 2076 +index_buffer: 2178 +index_buffer: 2076 +index_buffer: 2079 +index_buffer: 2179 +index_buffer: 2180 +index_buffer: 2077 +index_buffer: 2179 +index_buffer: 2077 +index_buffer: 2076 +index_buffer: 2177 +index_buffer: 2136 +index_buffer: 2164 +index_buffer: 2177 +index_buffer: 2164 +index_buffer: 2031 +index_buffer: 2035 +index_buffer: 1921 +index_buffer: 2077 +index_buffer: 2035 +index_buffer: 2077 +index_buffer: 2180 +index_buffer: 2034 +index_buffer: 2019 +index_buffer: 2020 +index_buffer: 2034 +index_buffer: 2020 +index_buffer: 2061 +index_buffer: 2030 +index_buffer: 2172 +index_buffer: 2064 +index_buffer: 2030 +index_buffer: 2064 +index_buffer: 2029 +index_buffer: 2031 +index_buffer: 2164 +index_buffer: 2172 +index_buffer: 2031 +index_buffer: 2172 +index_buffer: 2030 +index_buffer: 2178 +index_buffer: 2145 +index_buffer: 2144 +index_buffer: 2178 +index_buffer: 2144 +index_buffer: 2179 +index_buffer: 2144 +index_buffer: 2146 +index_buffer: 2180 +index_buffer: 2144 +index_buffer: 2180 +index_buffer: 2179 +index_buffer: 2035 +index_buffer: 2180 +index_buffer: 2146 +index_buffer: 2035 +index_buffer: 2146 +index_buffer: 2166 +index_buffer: 2181 +index_buffer: 2017 +index_buffer: 2019 +index_buffer: 2181 +index_buffer: 2019 +index_buffer: 2173 +index_buffer: 2021 +index_buffer: 1913 +index_buffer: 2017 +index_buffer: 2021 +index_buffer: 2017 +index_buffer: 2181 +index_buffer: 2165 +index_buffer: 2026 +index_buffer: 2021 +index_buffer: 2165 +index_buffer: 2021 +index_buffer: 2181 
+index_buffer: 2173 +index_buffer: 2166 +index_buffer: 2165 +index_buffer: 2173 +index_buffer: 2165 +index_buffer: 2181 +index_buffer: 2182 +index_buffer: 2028 +index_buffer: 2029 +index_buffer: 2182 +index_buffer: 2029 +index_buffer: 2174 +index_buffer: 2047 +index_buffer: 1933 +index_buffer: 2028 +index_buffer: 2047 +index_buffer: 2028 +index_buffer: 2182 +index_buffer: 2018 +index_buffer: 1912 +index_buffer: 2047 +index_buffer: 2018 +index_buffer: 2047 +index_buffer: 2182 +index_buffer: 2020 +index_buffer: 2018 +index_buffer: 2182 +index_buffer: 2020 +index_buffer: 2182 +index_buffer: 2174 +index_buffer: 2176 +index_buffer: 2175 +index_buffer: 2183 +index_buffer: 2176 +index_buffer: 2183 +index_buffer: 2184 +index_buffer: 2088 +index_buffer: 2093 +index_buffer: 2184 +index_buffer: 2088 +index_buffer: 2184 +index_buffer: 2183 +index_buffer: 2185 +index_buffer: 2089 +index_buffer: 2088 +index_buffer: 2185 +index_buffer: 2088 +index_buffer: 2183 +index_buffer: 2185 +index_buffer: 2183 +index_buffer: 2175 +index_buffer: 2185 +index_buffer: 2175 +index_buffer: 2177 +index_buffer: 2185 +index_buffer: 2177 +index_buffer: 2031 +index_buffer: 2185 +index_buffer: 2031 +index_buffer: 2032 +index_buffer: 2032 +index_buffer: 1936 +index_buffer: 2089 +index_buffer: 2032 +index_buffer: 2089 +index_buffer: 2185 +index_buffer: 2068 +index_buffer: 2186 +index_buffer: 2187 +index_buffer: 2068 +index_buffer: 2187 +index_buffer: 1962 +index_buffer: 1960 +index_buffer: 1962 +index_buffer: 2187 +index_buffer: 1960 +index_buffer: 2187 +index_buffer: 2188 +index_buffer: 2188 +index_buffer: 2187 +index_buffer: 2189 +index_buffer: 2188 +index_buffer: 2189 +index_buffer: 2190 +index_buffer: 2186 +index_buffer: 2191 +index_buffer: 2189 +index_buffer: 2186 +index_buffer: 2189 +index_buffer: 2187 +index_buffer: 2192 +index_buffer: 2190 +index_buffer: 2193 +index_buffer: 2192 +index_buffer: 2193 +index_buffer: 2194 +index_buffer: 2193 +index_buffer: 2195 +index_buffer: 2196 +index_buffer: 2193 +index_buffer: 2196 +index_buffer: 2194 +index_buffer: 2193 +index_buffer: 2197 +index_buffer: 2198 +index_buffer: 2193 +index_buffer: 2198 +index_buffer: 2195 +index_buffer: 2190 +index_buffer: 2189 +index_buffer: 2197 +index_buffer: 2190 +index_buffer: 2197 +index_buffer: 2193 +index_buffer: 2195 +index_buffer: 2198 +index_buffer: 2199 +index_buffer: 2195 +index_buffer: 2199 +index_buffer: 2200 +index_buffer: 2197 +index_buffer: 2201 +index_buffer: 2202 +index_buffer: 2197 +index_buffer: 2202 +index_buffer: 2198 +index_buffer: 2197 +index_buffer: 2189 +index_buffer: 2191 +index_buffer: 2197 +index_buffer: 2191 +index_buffer: 2201 +index_buffer: 1957 +index_buffer: 1960 +index_buffer: 2188 +index_buffer: 1957 +index_buffer: 2188 +index_buffer: 2203 +index_buffer: 2192 +index_buffer: 2203 +index_buffer: 2188 +index_buffer: 2192 +index_buffer: 2188 +index_buffer: 2190 +index_buffer: 2200 +index_buffer: 2199 +index_buffer: 2204 +index_buffer: 2200 +index_buffer: 2204 +index_buffer: 2205 +index_buffer: 2206 +index_buffer: 2207 +index_buffer: 2208 +index_buffer: 2206 +index_buffer: 2208 +index_buffer: 2209 +index_buffer: 2205 +index_buffer: 2204 +index_buffer: 2208 +index_buffer: 2205 +index_buffer: 2208 +index_buffer: 2207 +index_buffer: 2210 +index_buffer: 2211 +index_buffer: 2209 +index_buffer: 2210 +index_buffer: 2209 +index_buffer: 2208 +index_buffer: 2208 +index_buffer: 2204 +index_buffer: 2212 +index_buffer: 2208 +index_buffer: 2212 +index_buffer: 2210 +index_buffer: 2199 +index_buffer: 2213 +index_buffer: 2212 +index_buffer: 
2199 +index_buffer: 2212 +index_buffer: 2204 +index_buffer: 2198 +index_buffer: 2202 +index_buffer: 2213 +index_buffer: 2198 +index_buffer: 2213 +index_buffer: 2199 +index_buffer: 2196 +index_buffer: 2195 +index_buffer: 2200 +index_buffer: 2196 +index_buffer: 2200 +index_buffer: 2214 +index_buffer: 2200 +index_buffer: 2205 +index_buffer: 2215 +index_buffer: 2200 +index_buffer: 2215 +index_buffer: 2214 +index_buffer: 2205 +index_buffer: 2207 +index_buffer: 2216 +index_buffer: 2205 +index_buffer: 2216 +index_buffer: 2215 +index_buffer: 2206 +index_buffer: 2217 +index_buffer: 2216 +index_buffer: 2206 +index_buffer: 2216 +index_buffer: 2207 +index_buffer: 1748 +index_buffer: 2218 +index_buffer: 2219 +index_buffer: 1748 +index_buffer: 2219 +index_buffer: 1749 +index_buffer: 1751 +index_buffer: 2220 +index_buffer: 2218 +index_buffer: 1751 +index_buffer: 2218 +index_buffer: 1748 +index_buffer: 2221 +index_buffer: 2222 +index_buffer: 2223 +index_buffer: 2221 +index_buffer: 2223 +index_buffer: 2224 +index_buffer: 2225 +index_buffer: 2226 +index_buffer: 2222 +index_buffer: 2225 +index_buffer: 2222 +index_buffer: 2221 +index_buffer: 2222 +index_buffer: 2227 +index_buffer: 2228 +index_buffer: 2222 +index_buffer: 2228 +index_buffer: 2223 +index_buffer: 2229 +index_buffer: 2227 +index_buffer: 2222 +index_buffer: 2229 +index_buffer: 2222 +index_buffer: 2226 +index_buffer: 2227 +index_buffer: 2230 +index_buffer: 2231 +index_buffer: 2227 +index_buffer: 2231 +index_buffer: 2228 +index_buffer: 2229 +index_buffer: 2232 +index_buffer: 2230 +index_buffer: 2229 +index_buffer: 2230 +index_buffer: 2227 +index_buffer: 2232 +index_buffer: 2233 +index_buffer: 2234 +index_buffer: 2232 +index_buffer: 2234 +index_buffer: 2230 +index_buffer: 2235 +index_buffer: 2231 +index_buffer: 2230 +index_buffer: 2235 +index_buffer: 2230 +index_buffer: 2234 +index_buffer: 2234 +index_buffer: 2236 +index_buffer: 2237 +index_buffer: 2234 +index_buffer: 2237 +index_buffer: 2235 +index_buffer: 2233 +index_buffer: 2238 +index_buffer: 2236 +index_buffer: 2233 +index_buffer: 2236 +index_buffer: 2234 +index_buffer: 2238 +index_buffer: 2239 +index_buffer: 2240 +index_buffer: 2238 +index_buffer: 2240 +index_buffer: 2236 +index_buffer: 2236 +index_buffer: 2240 +index_buffer: 2241 +index_buffer: 2236 +index_buffer: 2241 +index_buffer: 2237 +index_buffer: 2240 +index_buffer: 2242 +index_buffer: 2243 +index_buffer: 2240 +index_buffer: 2243 +index_buffer: 2241 +index_buffer: 2239 +index_buffer: 2244 +index_buffer: 2242 +index_buffer: 2239 +index_buffer: 2242 +index_buffer: 2240 +index_buffer: 2245 +index_buffer: 2242 +index_buffer: 2244 +index_buffer: 2245 +index_buffer: 2244 +index_buffer: 2246 +index_buffer: 2242 +index_buffer: 2245 +index_buffer: 2247 +index_buffer: 2242 +index_buffer: 2247 +index_buffer: 2243 +index_buffer: 2247 +index_buffer: 2245 +index_buffer: 2248 +index_buffer: 2247 +index_buffer: 2248 +index_buffer: 2249 +index_buffer: 2246 +index_buffer: 2250 +index_buffer: 2248 +index_buffer: 2246 +index_buffer: 2248 +index_buffer: 2245 +index_buffer: 2250 +index_buffer: 2251 +index_buffer: 2252 +index_buffer: 2250 +index_buffer: 2252 +index_buffer: 2248 +index_buffer: 2248 +index_buffer: 2252 +index_buffer: 2253 +index_buffer: 2248 +index_buffer: 2253 +index_buffer: 2249 +index_buffer: 2218 +index_buffer: 2221 +index_buffer: 2224 +index_buffer: 2218 +index_buffer: 2224 +index_buffer: 2219 +index_buffer: 2220 +index_buffer: 2225 +index_buffer: 2221 +index_buffer: 2220 +index_buffer: 2221 +index_buffer: 2218 +index_buffer: 2254 
+index_buffer: 2253 +index_buffer: 2252 +index_buffer: 2254 +index_buffer: 2252 +index_buffer: 2255 +index_buffer: 2252 +index_buffer: 2251 +index_buffer: 2256 +index_buffer: 2252 +index_buffer: 2256 +index_buffer: 2255 +index_buffer: 2255 +index_buffer: 2256 +index_buffer: 2257 +index_buffer: 2255 +index_buffer: 2257 +index_buffer: 2258 +index_buffer: 2255 +index_buffer: 2258 +index_buffer: 2259 +index_buffer: 2255 +index_buffer: 2259 +index_buffer: 2254 +index_buffer: 1751 +index_buffer: 1792 +index_buffer: 2260 +index_buffer: 1751 +index_buffer: 2260 +index_buffer: 2220 +index_buffer: 2260 +index_buffer: 1792 +index_buffer: 1793 +index_buffer: 2260 +index_buffer: 1793 +index_buffer: 2261 +index_buffer: 2219 +index_buffer: 2262 +index_buffer: 1796 +index_buffer: 2219 +index_buffer: 1796 +index_buffer: 1749 +index_buffer: 1797 +index_buffer: 1796 +index_buffer: 2262 +index_buffer: 1797 +index_buffer: 2262 +index_buffer: 2263 +index_buffer: 2264 +index_buffer: 2226 +index_buffer: 2225 +index_buffer: 2264 +index_buffer: 2225 +index_buffer: 2265 +index_buffer: 2266 +index_buffer: 2264 +index_buffer: 2265 +index_buffer: 2266 +index_buffer: 2265 +index_buffer: 2267 +index_buffer: 2223 +index_buffer: 2268 +index_buffer: 2269 +index_buffer: 2223 +index_buffer: 2269 +index_buffer: 2224 +index_buffer: 2268 +index_buffer: 2270 +index_buffer: 2271 +index_buffer: 2268 +index_buffer: 2271 +index_buffer: 2269 +index_buffer: 2264 +index_buffer: 2272 +index_buffer: 2229 +index_buffer: 2264 +index_buffer: 2229 +index_buffer: 2226 +index_buffer: 2264 +index_buffer: 2266 +index_buffer: 2273 +index_buffer: 2264 +index_buffer: 2273 +index_buffer: 2272 +index_buffer: 2228 +index_buffer: 2274 +index_buffer: 2268 +index_buffer: 2228 +index_buffer: 2268 +index_buffer: 2223 +index_buffer: 2270 +index_buffer: 2268 +index_buffer: 2274 +index_buffer: 2270 +index_buffer: 2274 +index_buffer: 2275 +index_buffer: 2276 +index_buffer: 2233 +index_buffer: 2232 +index_buffer: 2276 +index_buffer: 2232 +index_buffer: 2277 +index_buffer: 2277 +index_buffer: 2232 +index_buffer: 2229 +index_buffer: 2277 +index_buffer: 2229 +index_buffer: 2272 +index_buffer: 2273 +index_buffer: 2278 +index_buffer: 2277 +index_buffer: 2273 +index_buffer: 2277 +index_buffer: 2272 +index_buffer: 2278 +index_buffer: 2279 +index_buffer: 2276 +index_buffer: 2278 +index_buffer: 2276 +index_buffer: 2277 +index_buffer: 2274 +index_buffer: 2228 +index_buffer: 2231 +index_buffer: 2274 +index_buffer: 2231 +index_buffer: 2280 +index_buffer: 2235 +index_buffer: 2281 +index_buffer: 2280 +index_buffer: 2235 +index_buffer: 2280 +index_buffer: 2231 +index_buffer: 2281 +index_buffer: 2282 +index_buffer: 2283 +index_buffer: 2281 +index_buffer: 2283 +index_buffer: 2280 +index_buffer: 2275 +index_buffer: 2274 +index_buffer: 2280 +index_buffer: 2275 +index_buffer: 2280 +index_buffer: 2283 +index_buffer: 2284 +index_buffer: 2285 +index_buffer: 2239 +index_buffer: 2284 +index_buffer: 2239 +index_buffer: 2238 +index_buffer: 2276 +index_buffer: 2284 +index_buffer: 2238 +index_buffer: 2276 +index_buffer: 2238 +index_buffer: 2233 +index_buffer: 2284 +index_buffer: 2276 +index_buffer: 2279 +index_buffer: 2284 +index_buffer: 2279 +index_buffer: 2286 +index_buffer: 2284 +index_buffer: 2286 +index_buffer: 2287 +index_buffer: 2284 +index_buffer: 2287 +index_buffer: 2285 +index_buffer: 2281 +index_buffer: 2235 +index_buffer: 2237 +index_buffer: 2281 +index_buffer: 2237 +index_buffer: 2288 +index_buffer: 2241 +index_buffer: 2289 +index_buffer: 2288 +index_buffer: 2241 +index_buffer: 
2288 +index_buffer: 2237 +index_buffer: 2290 +index_buffer: 2288 +index_buffer: 2289 +index_buffer: 2290 +index_buffer: 2289 +index_buffer: 2291 +index_buffer: 2288 +index_buffer: 2290 +index_buffer: 2282 +index_buffer: 2288 +index_buffer: 2282 +index_buffer: 2281 +index_buffer: 2244 +index_buffer: 2292 +index_buffer: 2293 +index_buffer: 2244 +index_buffer: 2293 +index_buffer: 2246 +index_buffer: 2285 +index_buffer: 2292 +index_buffer: 2244 +index_buffer: 2285 +index_buffer: 2244 +index_buffer: 2239 +index_buffer: 2285 +index_buffer: 2287 +index_buffer: 2294 +index_buffer: 2285 +index_buffer: 2294 +index_buffer: 2292 +index_buffer: 2295 +index_buffer: 2293 +index_buffer: 2292 +index_buffer: 2295 +index_buffer: 2292 +index_buffer: 2294 +index_buffer: 2243 +index_buffer: 2296 +index_buffer: 2289 +index_buffer: 2243 +index_buffer: 2289 +index_buffer: 2241 +index_buffer: 2243 +index_buffer: 2247 +index_buffer: 2297 +index_buffer: 2243 +index_buffer: 2297 +index_buffer: 2296 +index_buffer: 2298 +index_buffer: 2299 +index_buffer: 2296 +index_buffer: 2298 +index_buffer: 2296 +index_buffer: 2297 +index_buffer: 2291 +index_buffer: 2289 +index_buffer: 2296 +index_buffer: 2291 +index_buffer: 2296 +index_buffer: 2299 +index_buffer: 2251 +index_buffer: 2250 +index_buffer: 2300 +index_buffer: 2251 +index_buffer: 2300 +index_buffer: 2301 +index_buffer: 2250 +index_buffer: 2246 +index_buffer: 2293 +index_buffer: 2250 +index_buffer: 2293 +index_buffer: 2300 +index_buffer: 2302 +index_buffer: 2300 +index_buffer: 2293 +index_buffer: 2302 +index_buffer: 2293 +index_buffer: 2295 +index_buffer: 2302 +index_buffer: 2303 +index_buffer: 2301 +index_buffer: 2302 +index_buffer: 2301 +index_buffer: 2300 +index_buffer: 2304 +index_buffer: 2297 +index_buffer: 2247 +index_buffer: 2304 +index_buffer: 2247 +index_buffer: 2249 +index_buffer: 2249 +index_buffer: 2253 +index_buffer: 2305 +index_buffer: 2249 +index_buffer: 2305 +index_buffer: 2304 +index_buffer: 2305 +index_buffer: 2306 +index_buffer: 2307 +index_buffer: 2305 +index_buffer: 2307 +index_buffer: 2304 +index_buffer: 2297 +index_buffer: 2304 +index_buffer: 2307 +index_buffer: 2297 +index_buffer: 2307 +index_buffer: 2298 +index_buffer: 2265 +index_buffer: 2225 +index_buffer: 2220 +index_buffer: 2265 +index_buffer: 2220 +index_buffer: 2260 +index_buffer: 2261 +index_buffer: 2267 +index_buffer: 2265 +index_buffer: 2261 +index_buffer: 2265 +index_buffer: 2260 +index_buffer: 2224 +index_buffer: 2269 +index_buffer: 2262 +index_buffer: 2224 +index_buffer: 2262 +index_buffer: 2219 +index_buffer: 2269 +index_buffer: 2271 +index_buffer: 2263 +index_buffer: 2269 +index_buffer: 2263 +index_buffer: 2262 +index_buffer: 2257 +index_buffer: 2256 +index_buffer: 2308 +index_buffer: 2257 +index_buffer: 2308 +index_buffer: 2309 +index_buffer: 2301 +index_buffer: 2308 +index_buffer: 2256 +index_buffer: 2301 +index_buffer: 2256 +index_buffer: 2251 +index_buffer: 2308 +index_buffer: 2301 +index_buffer: 2303 +index_buffer: 2308 +index_buffer: 2303 +index_buffer: 2310 +index_buffer: 2310 +index_buffer: 2311 +index_buffer: 2309 +index_buffer: 2310 +index_buffer: 2309 +index_buffer: 2308 +index_buffer: 2312 +index_buffer: 2305 +index_buffer: 2253 +index_buffer: 2312 +index_buffer: 2253 +index_buffer: 2254 +index_buffer: 2254 +index_buffer: 2259 +index_buffer: 2313 +index_buffer: 2254 +index_buffer: 2313 +index_buffer: 2312 +index_buffer: 2314 +index_buffer: 2312 +index_buffer: 2313 +index_buffer: 2314 +index_buffer: 2313 +index_buffer: 2315 +index_buffer: 2312 +index_buffer: 2314 
+index_buffer: 2306 +index_buffer: 2312 +index_buffer: 2306 +index_buffer: 2305 +index_buffer: 2259 +index_buffer: 2258 +index_buffer: 2316 +index_buffer: 2259 +index_buffer: 2316 +index_buffer: 2317 +index_buffer: 2318 +index_buffer: 2316 +index_buffer: 2258 +index_buffer: 2318 +index_buffer: 2258 +index_buffer: 2257 +index_buffer: 2257 +index_buffer: 2309 +index_buffer: 2319 +index_buffer: 2257 +index_buffer: 2319 +index_buffer: 2318 +index_buffer: 2311 +index_buffer: 2320 +index_buffer: 2319 +index_buffer: 2311 +index_buffer: 2319 +index_buffer: 2309 +index_buffer: 2321 +index_buffer: 2322 +index_buffer: 2323 +index_buffer: 2321 +index_buffer: 2323 +index_buffer: 2315 +index_buffer: 2324 +index_buffer: 2325 +index_buffer: 2326 +index_buffer: 2324 +index_buffer: 2326 +index_buffer: 2327 +index_buffer: 2328 +index_buffer: 2329 +index_buffer: 2327 +index_buffer: 2328 +index_buffer: 2327 +index_buffer: 2326 +index_buffer: 2261 +index_buffer: 1793 +index_buffer: 1865 +index_buffer: 2261 +index_buffer: 1865 +index_buffer: 2330 +index_buffer: 1867 +index_buffer: 1797 +index_buffer: 2263 +index_buffer: 1867 +index_buffer: 2263 +index_buffer: 2331 +index_buffer: 2267 +index_buffer: 2332 +index_buffer: 2333 +index_buffer: 2267 +index_buffer: 2333 +index_buffer: 2266 +index_buffer: 2334 +index_buffer: 2335 +index_buffer: 2271 +index_buffer: 2334 +index_buffer: 2271 +index_buffer: 2270 +index_buffer: 2333 +index_buffer: 2336 +index_buffer: 2273 +index_buffer: 2333 +index_buffer: 2273 +index_buffer: 2266 +index_buffer: 2275 +index_buffer: 2337 +index_buffer: 2334 +index_buffer: 2275 +index_buffer: 2334 +index_buffer: 2270 +index_buffer: 2338 +index_buffer: 2278 +index_buffer: 2273 +index_buffer: 2338 +index_buffer: 2273 +index_buffer: 2336 +index_buffer: 2338 +index_buffer: 2339 +index_buffer: 2279 +index_buffer: 2338 +index_buffer: 2279 +index_buffer: 2278 +index_buffer: 2282 +index_buffer: 2340 +index_buffer: 2341 +index_buffer: 2282 +index_buffer: 2341 +index_buffer: 2283 +index_buffer: 2283 +index_buffer: 2341 +index_buffer: 2337 +index_buffer: 2283 +index_buffer: 2337 +index_buffer: 2275 +index_buffer: 2339 +index_buffer: 2342 +index_buffer: 2286 +index_buffer: 2339 +index_buffer: 2286 +index_buffer: 2279 +index_buffer: 2286 +index_buffer: 2342 +index_buffer: 2343 +index_buffer: 2286 +index_buffer: 2343 +index_buffer: 2287 +index_buffer: 2344 +index_buffer: 2345 +index_buffer: 2290 +index_buffer: 2344 +index_buffer: 2290 +index_buffer: 2291 +index_buffer: 2290 +index_buffer: 2345 +index_buffer: 2340 +index_buffer: 2290 +index_buffer: 2340 +index_buffer: 2282 +index_buffer: 2343 +index_buffer: 2346 +index_buffer: 2294 +index_buffer: 2343 +index_buffer: 2294 +index_buffer: 2287 +index_buffer: 2294 +index_buffer: 2346 +index_buffer: 2347 +index_buffer: 2294 +index_buffer: 2347 +index_buffer: 2295 +index_buffer: 2348 +index_buffer: 2299 +index_buffer: 2298 +index_buffer: 2348 +index_buffer: 2298 +index_buffer: 2349 +index_buffer: 2299 +index_buffer: 2348 +index_buffer: 2344 +index_buffer: 2299 +index_buffer: 2344 +index_buffer: 2291 +index_buffer: 2302 +index_buffer: 2295 +index_buffer: 2347 +index_buffer: 2302 +index_buffer: 2347 +index_buffer: 2350 +index_buffer: 2350 +index_buffer: 2351 +index_buffer: 2303 +index_buffer: 2350 +index_buffer: 2303 +index_buffer: 2302 +index_buffer: 2306 +index_buffer: 2352 +index_buffer: 2353 +index_buffer: 2306 +index_buffer: 2353 +index_buffer: 2307 +index_buffer: 2353 +index_buffer: 2349 +index_buffer: 2298 +index_buffer: 2353 +index_buffer: 2298 +index_buffer: 
2307 +index_buffer: 2261 +index_buffer: 2330 +index_buffer: 2332 +index_buffer: 2261 +index_buffer: 2332 +index_buffer: 2267 +index_buffer: 2335 +index_buffer: 2331 +index_buffer: 2263 +index_buffer: 2335 +index_buffer: 2263 +index_buffer: 2271 +index_buffer: 2351 +index_buffer: 2354 +index_buffer: 2310 +index_buffer: 2351 +index_buffer: 2310 +index_buffer: 2303 +index_buffer: 2354 +index_buffer: 2355 +index_buffer: 2311 +index_buffer: 2354 +index_buffer: 2311 +index_buffer: 2310 +index_buffer: 2315 +index_buffer: 2323 +index_buffer: 2356 +index_buffer: 2315 +index_buffer: 2356 +index_buffer: 2314 +index_buffer: 2314 +index_buffer: 2356 +index_buffer: 2352 +index_buffer: 2314 +index_buffer: 2352 +index_buffer: 2306 +index_buffer: 2320 +index_buffer: 2311 +index_buffer: 2355 +index_buffer: 2320 +index_buffer: 2355 +index_buffer: 2357 +index_buffer: 2313 +index_buffer: 2358 +index_buffer: 2321 +index_buffer: 2313 +index_buffer: 2321 +index_buffer: 2315 +index_buffer: 2358 +index_buffer: 2313 +index_buffer: 2259 +index_buffer: 2358 +index_buffer: 2259 +index_buffer: 2317 +index_buffer: 2326 +index_buffer: 2325 +index_buffer: 2359 +index_buffer: 2326 +index_buffer: 2359 +index_buffer: 2360 +index_buffer: 2359 +index_buffer: 2361 +index_buffer: 2362 +index_buffer: 2359 +index_buffer: 2362 +index_buffer: 2360 +index_buffer: 2362 +index_buffer: 2363 +index_buffer: 2364 +index_buffer: 2362 +index_buffer: 2364 +index_buffer: 2360 +index_buffer: 2360 +index_buffer: 2364 +index_buffer: 2328 +index_buffer: 2360 +index_buffer: 2328 +index_buffer: 2326 +index_buffer: 2318 +index_buffer: 2363 +index_buffer: 2362 +index_buffer: 2318 +index_buffer: 2362 +index_buffer: 2316 +index_buffer: 2362 +index_buffer: 2361 +index_buffer: 2317 +index_buffer: 2362 +index_buffer: 2317 +index_buffer: 2316 +index_buffer: 2365 +index_buffer: 2366 +index_buffer: 2319 +index_buffer: 2365 +index_buffer: 2319 +index_buffer: 2320 +index_buffer: 2363 +index_buffer: 2318 +index_buffer: 2319 +index_buffer: 2363 +index_buffer: 2319 +index_buffer: 2366 +index_buffer: 2367 +index_buffer: 2368 +index_buffer: 2322 +index_buffer: 2367 +index_buffer: 2322 +index_buffer: 2321 +index_buffer: 2320 +index_buffer: 2357 +index_buffer: 2369 +index_buffer: 2320 +index_buffer: 2369 +index_buffer: 2365 +index_buffer: 2317 +index_buffer: 2361 +index_buffer: 2370 +index_buffer: 2317 +index_buffer: 2370 +index_buffer: 2358 +index_buffer: 2321 +index_buffer: 2358 +index_buffer: 2370 +index_buffer: 2321 +index_buffer: 2370 +index_buffer: 2367 +index_buffer: 2371 +index_buffer: 2372 +index_buffer: 2373 +index_buffer: 2371 +index_buffer: 2373 +index_buffer: 2374 +index_buffer: 2375 +index_buffer: 2374 +index_buffer: 2373 +index_buffer: 2375 +index_buffer: 2373 +index_buffer: 2376 +index_buffer: 2377 +index_buffer: 2378 +index_buffer: 2379 +index_buffer: 2377 +index_buffer: 2379 +index_buffer: 2380 +index_buffer: 2381 +index_buffer: 2380 +index_buffer: 2379 +index_buffer: 2381 +index_buffer: 2379 +index_buffer: 2382 +index_buffer: 2381 +index_buffer: 2383 +index_buffer: 2384 +index_buffer: 2381 +index_buffer: 2384 +index_buffer: 2380 +index_buffer: 2384 +index_buffer: 2385 +index_buffer: 2377 +index_buffer: 2384 +index_buffer: 2377 +index_buffer: 2380 +index_buffer: 2386 +index_buffer: 2387 +index_buffer: 2388 +index_buffer: 2386 +index_buffer: 2388 +index_buffer: 2389 +index_buffer: 2390 +index_buffer: 2391 +index_buffer: 2387 +index_buffer: 2390 +index_buffer: 2387 +index_buffer: 2386 +index_buffer: 2392 +index_buffer: 2393 +index_buffer: 2394 
+index_buffer: 2392 +index_buffer: 2394 +index_buffer: 2395 +index_buffer: 2395 +index_buffer: 2394 +index_buffer: 2396 +index_buffer: 2395 +index_buffer: 2396 +index_buffer: 2397 +index_buffer: 2398 +index_buffer: 2395 +index_buffer: 2397 +index_buffer: 2398 +index_buffer: 2397 +index_buffer: 2399 +index_buffer: 2400 +index_buffer: 2392 +index_buffer: 2395 +index_buffer: 2400 +index_buffer: 2395 +index_buffer: 2398 +index_buffer: 2401 +index_buffer: 2402 +index_buffer: 2403 +index_buffer: 2401 +index_buffer: 2403 +index_buffer: 2404 +index_buffer: 2405 +index_buffer: 2406 +index_buffer: 2404 +index_buffer: 2405 +index_buffer: 2404 +index_buffer: 2403 +index_buffer: 2407 +index_buffer: 2404 +index_buffer: 2406 +index_buffer: 2407 +index_buffer: 2406 +index_buffer: 2408 +index_buffer: 2409 +index_buffer: 2401 +index_buffer: 2404 +index_buffer: 2409 +index_buffer: 2404 +index_buffer: 2407 +index_buffer: 2371 +index_buffer: 2410 +index_buffer: 2411 +index_buffer: 2371 +index_buffer: 2411 +index_buffer: 2372 +index_buffer: 2412 +index_buffer: 2411 +index_buffer: 2410 +index_buffer: 2412 +index_buffer: 2410 +index_buffer: 2413 +index_buffer: 2393 +index_buffer: 2392 +index_buffer: 2414 +index_buffer: 2393 +index_buffer: 2414 +index_buffer: 2415 +index_buffer: 2416 +index_buffer: 2414 +index_buffer: 2392 +index_buffer: 2416 +index_buffer: 2392 +index_buffer: 2400 +index_buffer: 2416 +index_buffer: 2417 +index_buffer: 2418 +index_buffer: 2416 +index_buffer: 2418 +index_buffer: 2414 +index_buffer: 2414 +index_buffer: 2418 +index_buffer: 2419 +index_buffer: 2414 +index_buffer: 2419 +index_buffer: 2415 +index_buffer: 2420 +index_buffer: 2421 +index_buffer: 2422 +index_buffer: 2420 +index_buffer: 2422 +index_buffer: 2423 +index_buffer: 2424 +index_buffer: 2425 +index_buffer: 2420 +index_buffer: 2424 +index_buffer: 2420 +index_buffer: 2423 +index_buffer: 2420 +index_buffer: 2425 +index_buffer: 2426 +index_buffer: 2420 +index_buffer: 2426 +index_buffer: 2427 +index_buffer: 2421 +index_buffer: 2420 +index_buffer: 2427 +index_buffer: 2421 +index_buffer: 2427 +index_buffer: 2428 +index_buffer: 2429 +index_buffer: 2430 +index_buffer: 2431 +index_buffer: 2429 +index_buffer: 2431 +index_buffer: 2432 +index_buffer: 2433 +index_buffer: 2434 +index_buffer: 2432 +index_buffer: 2433 +index_buffer: 2432 +index_buffer: 2431 +index_buffer: 2435 +index_buffer: 2432 +index_buffer: 2434 +index_buffer: 2435 +index_buffer: 2434 +index_buffer: 2436 +index_buffer: 2437 +index_buffer: 2429 +index_buffer: 2432 +index_buffer: 2437 +index_buffer: 2432 +index_buffer: 2435 +index_buffer: 2438 +index_buffer: 2439 +index_buffer: 2412 +index_buffer: 2438 +index_buffer: 2412 +index_buffer: 2413 +index_buffer: 2440 +index_buffer: 2441 +index_buffer: 2439 +index_buffer: 2440 +index_buffer: 2439 +index_buffer: 2438 +index_buffer: 2419 +index_buffer: 2418 +index_buffer: 2442 +index_buffer: 2419 +index_buffer: 2442 +index_buffer: 2443 +index_buffer: 2418 +index_buffer: 2417 +index_buffer: 2444 +index_buffer: 2418 +index_buffer: 2444 +index_buffer: 2442 +index_buffer: 2442 +index_buffer: 2444 +index_buffer: 2445 +index_buffer: 2442 +index_buffer: 2445 +index_buffer: 2446 +index_buffer: 2443 +index_buffer: 2442 +index_buffer: 2446 +index_buffer: 2443 +index_buffer: 2446 +index_buffer: 2447 +index_buffer: 2427 +index_buffer: 2448 +index_buffer: 2449 +index_buffer: 2427 +index_buffer: 2449 +index_buffer: 2428 +index_buffer: 2426 +index_buffer: 2450 +index_buffer: 2448 +index_buffer: 2426 +index_buffer: 2448 +index_buffer: 2427 +index_buffer: 
2448 +index_buffer: 2450 +index_buffer: 2451 +index_buffer: 2448 +index_buffer: 2451 +index_buffer: 2452 +index_buffer: 2448 +index_buffer: 2452 +index_buffer: 2453 +index_buffer: 2448 +index_buffer: 2453 +index_buffer: 2449 +index_buffer: 2454 +index_buffer: 2437 +index_buffer: 2435 +index_buffer: 2454 +index_buffer: 2435 +index_buffer: 2455 +index_buffer: 2455 +index_buffer: 2435 +index_buffer: 2436 +index_buffer: 2455 +index_buffer: 2436 +index_buffer: 2456 +index_buffer: 2457 +index_buffer: 2455 +index_buffer: 2456 +index_buffer: 2457 +index_buffer: 2456 +index_buffer: 2458 +index_buffer: 2459 +index_buffer: 2454 +index_buffer: 2455 +index_buffer: 2459 +index_buffer: 2455 +index_buffer: 2457 +index_buffer: 2460 +index_buffer: 2441 +index_buffer: 2440 +index_buffer: 2460 +index_buffer: 2440 +index_buffer: 2461 +index_buffer: 2388 +index_buffer: 2460 +index_buffer: 2461 +index_buffer: 2388 +index_buffer: 2461 +index_buffer: 2389 +index_buffer: 2462 +index_buffer: 2447 +index_buffer: 2446 +index_buffer: 2462 +index_buffer: 2446 +index_buffer: 2463 +index_buffer: 2446 +index_buffer: 2445 +index_buffer: 2464 +index_buffer: 2446 +index_buffer: 2464 +index_buffer: 2463 +index_buffer: 2463 +index_buffer: 2464 +index_buffer: 2465 +index_buffer: 2463 +index_buffer: 2465 +index_buffer: 2466 +index_buffer: 2466 +index_buffer: 2467 +index_buffer: 2462 +index_buffer: 2466 +index_buffer: 2462 +index_buffer: 2463 +index_buffer: 2453 +index_buffer: 2452 +index_buffer: 2468 +index_buffer: 2453 +index_buffer: 2468 +index_buffer: 2469 +index_buffer: 2452 +index_buffer: 2451 +index_buffer: 2470 +index_buffer: 2452 +index_buffer: 2470 +index_buffer: 2468 +index_buffer: 2471 +index_buffer: 2472 +index_buffer: 2468 +index_buffer: 2471 +index_buffer: 2468 +index_buffer: 2470 +index_buffer: 2473 +index_buffer: 2469 +index_buffer: 2468 +index_buffer: 2473 +index_buffer: 2468 +index_buffer: 2472 +index_buffer: 2474 +index_buffer: 2459 +index_buffer: 2457 +index_buffer: 2474 +index_buffer: 2457 +index_buffer: 2475 +index_buffer: 2475 +index_buffer: 2457 +index_buffer: 2458 +index_buffer: 2475 +index_buffer: 2458 +index_buffer: 2476 +index_buffer: 2477 +index_buffer: 2475 +index_buffer: 2476 +index_buffer: 2477 +index_buffer: 2476 +index_buffer: 2478 +index_buffer: 2477 +index_buffer: 2479 +index_buffer: 2474 +index_buffer: 2477 +index_buffer: 2474 +index_buffer: 2475 +index_buffer: 2375 +index_buffer: 2376 +index_buffer: 2480 +index_buffer: 2375 +index_buffer: 2480 +index_buffer: 2481 +index_buffer: 2481 +index_buffer: 2480 +index_buffer: 2482 +index_buffer: 2481 +index_buffer: 2482 +index_buffer: 2483 +index_buffer: 2373 +index_buffer: 2484 +index_buffer: 2485 +index_buffer: 2373 +index_buffer: 2485 +index_buffer: 2376 +index_buffer: 2372 +index_buffer: 2486 +index_buffer: 2484 +index_buffer: 2372 +index_buffer: 2484 +index_buffer: 2373 +index_buffer: 2484 +index_buffer: 2486 +index_buffer: 2487 +index_buffer: 2484 +index_buffer: 2487 +index_buffer: 2488 +index_buffer: 2485 +index_buffer: 2484 +index_buffer: 2488 +index_buffer: 2485 +index_buffer: 2488 +index_buffer: 2489 +index_buffer: 2396 +index_buffer: 2490 +index_buffer: 2491 +index_buffer: 2396 +index_buffer: 2491 +index_buffer: 2397 +index_buffer: 2492 +index_buffer: 2491 +index_buffer: 2490 +index_buffer: 2492 +index_buffer: 2490 +index_buffer: 2493 +index_buffer: 2491 +index_buffer: 2492 +index_buffer: 2494 +index_buffer: 2491 +index_buffer: 2494 +index_buffer: 2495 +index_buffer: 2495 +index_buffer: 2399 +index_buffer: 2397 +index_buffer: 2495 
+index_buffer: 2397 +index_buffer: 2491 +index_buffer: 2382 +index_buffer: 2496 +index_buffer: 2497 +index_buffer: 2382 +index_buffer: 2497 +index_buffer: 2381 +index_buffer: 2383 +index_buffer: 2381 +index_buffer: 2497 +index_buffer: 2383 +index_buffer: 2497 +index_buffer: 2498 +index_buffer: 2499 +index_buffer: 2500 +index_buffer: 2382 +index_buffer: 2499 +index_buffer: 2382 +index_buffer: 2379 +index_buffer: 2379 +index_buffer: 2378 +index_buffer: 2405 +index_buffer: 2379 +index_buffer: 2405 +index_buffer: 2499 +index_buffer: 2501 +index_buffer: 2502 +index_buffer: 2503 +index_buffer: 2501 +index_buffer: 2503 +index_buffer: 2504 +index_buffer: 2502 +index_buffer: 2402 +index_buffer: 2505 +index_buffer: 2502 +index_buffer: 2505 +index_buffer: 2503 +index_buffer: 2505 +index_buffer: 2506 +index_buffer: 2507 +index_buffer: 2505 +index_buffer: 2507 +index_buffer: 2503 +index_buffer: 2507 +index_buffer: 2508 +index_buffer: 2504 +index_buffer: 2507 +index_buffer: 2504 +index_buffer: 2503 +index_buffer: 2509 +index_buffer: 2374 +index_buffer: 2375 +index_buffer: 2509 +index_buffer: 2375 +index_buffer: 2510 +index_buffer: 2394 +index_buffer: 2509 +index_buffer: 2510 +index_buffer: 2394 +index_buffer: 2510 +index_buffer: 2396 +index_buffer: 2393 +index_buffer: 2511 +index_buffer: 2509 +index_buffer: 2393 +index_buffer: 2509 +index_buffer: 2394 +index_buffer: 2511 +index_buffer: 2371 +index_buffer: 2374 +index_buffer: 2511 +index_buffer: 2374 +index_buffer: 2509 +index_buffer: 2512 +index_buffer: 2513 +index_buffer: 2389 +index_buffer: 2512 +index_buffer: 2389 +index_buffer: 2514 +index_buffer: 2467 +index_buffer: 2466 +index_buffer: 2515 +index_buffer: 2467 +index_buffer: 2515 +index_buffer: 2514 +index_buffer: 2466 +index_buffer: 2465 +index_buffer: 2516 +index_buffer: 2466 +index_buffer: 2516 +index_buffer: 2515 +index_buffer: 2517 +index_buffer: 2513 +index_buffer: 2512 +index_buffer: 2517 +index_buffer: 2512 +index_buffer: 2518 +index_buffer: 2519 +index_buffer: 2410 +index_buffer: 2371 +index_buffer: 2519 +index_buffer: 2371 +index_buffer: 2511 +index_buffer: 2415 +index_buffer: 2519 +index_buffer: 2511 +index_buffer: 2415 +index_buffer: 2511 +index_buffer: 2393 +index_buffer: 2415 +index_buffer: 2419 +index_buffer: 2520 +index_buffer: 2415 +index_buffer: 2520 +index_buffer: 2519 +index_buffer: 2413 +index_buffer: 2410 +index_buffer: 2519 +index_buffer: 2413 +index_buffer: 2519 +index_buffer: 2520 +index_buffer: 2521 +index_buffer: 2438 +index_buffer: 2413 +index_buffer: 2521 +index_buffer: 2413 +index_buffer: 2520 +index_buffer: 2520 +index_buffer: 2419 +index_buffer: 2443 +index_buffer: 2520 +index_buffer: 2443 +index_buffer: 2521 +index_buffer: 2443 +index_buffer: 2447 +index_buffer: 2522 +index_buffer: 2443 +index_buffer: 2522 +index_buffer: 2521 +index_buffer: 2522 +index_buffer: 2440 +index_buffer: 2438 +index_buffer: 2522 +index_buffer: 2438 +index_buffer: 2521 +index_buffer: 2523 +index_buffer: 2461 +index_buffer: 2440 +index_buffer: 2523 +index_buffer: 2440 +index_buffer: 2522 +index_buffer: 2522 +index_buffer: 2447 +index_buffer: 2462 +index_buffer: 2522 +index_buffer: 2462 +index_buffer: 2523 +index_buffer: 2523 +index_buffer: 2462 +index_buffer: 2467 +index_buffer: 2523 +index_buffer: 2467 +index_buffer: 2514 +index_buffer: 2389 +index_buffer: 2461 +index_buffer: 2523 +index_buffer: 2389 +index_buffer: 2523 +index_buffer: 2514 +index_buffer: 2524 +index_buffer: 2525 +index_buffer: 2500 +index_buffer: 2524 +index_buffer: 2500 +index_buffer: 2526 +index_buffer: 2527 +index_buffer: 
2524 +index_buffer: 2526 +index_buffer: 2527 +index_buffer: 2526 +index_buffer: 2501 +index_buffer: 2403 +index_buffer: 2528 +index_buffer: 2499 +index_buffer: 2403 +index_buffer: 2499 +index_buffer: 2405 +index_buffer: 2403 +index_buffer: 2402 +index_buffer: 2502 +index_buffer: 2403 +index_buffer: 2502 +index_buffer: 2528 +index_buffer: 2526 +index_buffer: 2528 +index_buffer: 2502 +index_buffer: 2526 +index_buffer: 2502 +index_buffer: 2501 +index_buffer: 2526 +index_buffer: 2500 +index_buffer: 2499 +index_buffer: 2526 +index_buffer: 2499 +index_buffer: 2528 +index_buffer: 2405 +index_buffer: 2378 +index_buffer: 2529 +index_buffer: 2405 +index_buffer: 2529 +index_buffer: 2406 +index_buffer: 2529 +index_buffer: 2530 +index_buffer: 2408 +index_buffer: 2529 +index_buffer: 2408 +index_buffer: 2406 +index_buffer: 2425 +index_buffer: 2424 +index_buffer: 2531 +index_buffer: 2425 +index_buffer: 2531 +index_buffer: 2532 +index_buffer: 2533 +index_buffer: 2426 +index_buffer: 2425 +index_buffer: 2533 +index_buffer: 2425 +index_buffer: 2532 +index_buffer: 2450 +index_buffer: 2426 +index_buffer: 2533 +index_buffer: 2450 +index_buffer: 2533 +index_buffer: 2534 +index_buffer: 2451 +index_buffer: 2450 +index_buffer: 2534 +index_buffer: 2451 +index_buffer: 2534 +index_buffer: 2535 +index_buffer: 2470 +index_buffer: 2451 +index_buffer: 2535 +index_buffer: 2470 +index_buffer: 2535 +index_buffer: 2536 +index_buffer: 2470 +index_buffer: 2536 +index_buffer: 2537 +index_buffer: 2470 +index_buffer: 2537 +index_buffer: 2471 +index_buffer: 2538 +index_buffer: 2539 +index_buffer: 2540 +index_buffer: 2538 +index_buffer: 2540 +index_buffer: 2541 +index_buffer: 2540 +index_buffer: 2539 +index_buffer: 2542 +index_buffer: 2540 +index_buffer: 2542 +index_buffer: 2543 +index_buffer: 2543 +index_buffer: 2544 +index_buffer: 2545 +index_buffer: 2543 +index_buffer: 2545 +index_buffer: 2540 +index_buffer: 2541 +index_buffer: 2540 +index_buffer: 2545 +index_buffer: 2541 +index_buffer: 2545 +index_buffer: 2546 +index_buffer: 2537 +index_buffer: 2547 +index_buffer: 2548 +index_buffer: 2537 +index_buffer: 2548 +index_buffer: 2471 +index_buffer: 2549 +index_buffer: 2477 +index_buffer: 2478 +index_buffer: 2549 +index_buffer: 2478 +index_buffer: 2550 +index_buffer: 2551 +index_buffer: 2552 +index_buffer: 2553 +index_buffer: 2551 +index_buffer: 2553 +index_buffer: 2554 +index_buffer: 2555 +index_buffer: 2554 +index_buffer: 2553 +index_buffer: 2555 +index_buffer: 2553 +index_buffer: 2556 +index_buffer: 2557 +index_buffer: 2558 +index_buffer: 2554 +index_buffer: 2557 +index_buffer: 2554 +index_buffer: 2555 +index_buffer: 2558 +index_buffer: 2559 +index_buffer: 2551 +index_buffer: 2558 +index_buffer: 2551 +index_buffer: 2554 +index_buffer: 2560 +index_buffer: 2561 +index_buffer: 2562 +index_buffer: 2560 +index_buffer: 2562 +index_buffer: 2563 +index_buffer: 2564 +index_buffer: 2565 +index_buffer: 2562 +index_buffer: 2564 +index_buffer: 2562 +index_buffer: 2561 +index_buffer: 2565 +index_buffer: 2566 +index_buffer: 2567 +index_buffer: 2565 +index_buffer: 2567 +index_buffer: 2562 +index_buffer: 2567 +index_buffer: 2568 +index_buffer: 2563 +index_buffer: 2567 +index_buffer: 2563 +index_buffer: 2562 +index_buffer: 2569 +index_buffer: 2570 +index_buffer: 2559 +index_buffer: 2569 +index_buffer: 2559 +index_buffer: 2558 +index_buffer: 2558 +index_buffer: 2557 +index_buffer: 2571 +index_buffer: 2558 +index_buffer: 2571 +index_buffer: 2569 +index_buffer: 2569 +index_buffer: 2571 +index_buffer: 2572 +index_buffer: 2569 +index_buffer: 2572 
+index_buffer: 2573 +index_buffer: 2570 +index_buffer: 2569 +index_buffer: 2573 +index_buffer: 2570 +index_buffer: 2573 +index_buffer: 2574 +index_buffer: 2487 +index_buffer: 2575 +index_buffer: 2434 +index_buffer: 2487 +index_buffer: 2434 +index_buffer: 2433 +index_buffer: 2436 +index_buffer: 2434 +index_buffer: 2575 +index_buffer: 2436 +index_buffer: 2575 +index_buffer: 2576 +index_buffer: 2574 +index_buffer: 2573 +index_buffer: 2577 +index_buffer: 2574 +index_buffer: 2577 +index_buffer: 2578 +index_buffer: 2573 +index_buffer: 2572 +index_buffer: 2579 +index_buffer: 2573 +index_buffer: 2579 +index_buffer: 2577 +index_buffer: 2577 +index_buffer: 2579 +index_buffer: 2580 +index_buffer: 2577 +index_buffer: 2580 +index_buffer: 2581 +index_buffer: 2578 +index_buffer: 2577 +index_buffer: 2581 +index_buffer: 2578 +index_buffer: 2581 +index_buffer: 2582 +index_buffer: 2456 +index_buffer: 2436 +index_buffer: 2576 +index_buffer: 2456 +index_buffer: 2576 +index_buffer: 2583 +index_buffer: 2458 +index_buffer: 2456 +index_buffer: 2583 +index_buffer: 2458 +index_buffer: 2583 +index_buffer: 2584 +index_buffer: 2582 +index_buffer: 2581 +index_buffer: 2585 +index_buffer: 2582 +index_buffer: 2585 +index_buffer: 2586 +index_buffer: 2581 +index_buffer: 2580 +index_buffer: 2587 +index_buffer: 2581 +index_buffer: 2587 +index_buffer: 2585 +index_buffer: 2587 +index_buffer: 2588 +index_buffer: 2589 +index_buffer: 2587 +index_buffer: 2589 +index_buffer: 2585 +index_buffer: 2586 +index_buffer: 2585 +index_buffer: 2589 +index_buffer: 2586 +index_buffer: 2589 +index_buffer: 2590 +index_buffer: 2476 +index_buffer: 2458 +index_buffer: 2584 +index_buffer: 2476 +index_buffer: 2584 +index_buffer: 2591 +index_buffer: 2478 +index_buffer: 2476 +index_buffer: 2591 +index_buffer: 2478 +index_buffer: 2591 +index_buffer: 2391 +index_buffer: 2592 +index_buffer: 2433 +index_buffer: 2431 +index_buffer: 2592 +index_buffer: 2431 +index_buffer: 2593 +index_buffer: 2593 +index_buffer: 2431 +index_buffer: 2430 +index_buffer: 2593 +index_buffer: 2430 +index_buffer: 2594 +index_buffer: 2595 +index_buffer: 2593 +index_buffer: 2594 +index_buffer: 2595 +index_buffer: 2594 +index_buffer: 2596 +index_buffer: 2595 +index_buffer: 2597 +index_buffer: 2592 +index_buffer: 2595 +index_buffer: 2592 +index_buffer: 2593 +index_buffer: 2598 +index_buffer: 2599 +index_buffer: 2600 +index_buffer: 2598 +index_buffer: 2600 +index_buffer: 2601 +index_buffer: 2602 +index_buffer: 2598 +index_buffer: 2601 +index_buffer: 2602 +index_buffer: 2601 +index_buffer: 2603 +index_buffer: 2604 +index_buffer: 2597 +index_buffer: 2595 +index_buffer: 2604 +index_buffer: 2595 +index_buffer: 2605 +index_buffer: 2595 +index_buffer: 2596 +index_buffer: 2606 +index_buffer: 2595 +index_buffer: 2606 +index_buffer: 2605 +index_buffer: 2607 +index_buffer: 2608 +index_buffer: 2605 +index_buffer: 2607 +index_buffer: 2605 +index_buffer: 2606 +index_buffer: 2604 +index_buffer: 2605 +index_buffer: 2608 +index_buffer: 2604 +index_buffer: 2608 +index_buffer: 2609 +index_buffer: 2610 +index_buffer: 2611 +index_buffer: 2612 +index_buffer: 2610 +index_buffer: 2612 +index_buffer: 2613 +index_buffer: 2610 +index_buffer: 2614 +index_buffer: 2615 +index_buffer: 2610 +index_buffer: 2615 +index_buffer: 2611 +index_buffer: 2603 +index_buffer: 2601 +index_buffer: 2611 +index_buffer: 2603 +index_buffer: 2611 +index_buffer: 2615 +index_buffer: 2611 +index_buffer: 2601 +index_buffer: 2600 +index_buffer: 2611 +index_buffer: 2600 +index_buffer: 2612 +index_buffer: 2590 +index_buffer: 2589 +index_buffer: 
2616 +index_buffer: 2590 +index_buffer: 2616 +index_buffer: 2617 +index_buffer: 2618 +index_buffer: 2619 +index_buffer: 2473 +index_buffer: 2618 +index_buffer: 2473 +index_buffer: 2472 +index_buffer: 2548 +index_buffer: 2618 +index_buffer: 2472 +index_buffer: 2548 +index_buffer: 2472 +index_buffer: 2471 +index_buffer: 2620 +index_buffer: 2621 +index_buffer: 2622 +index_buffer: 2620 +index_buffer: 2622 +index_buffer: 2623 +index_buffer: 2624 +index_buffer: 2625 +index_buffer: 2561 +index_buffer: 2624 +index_buffer: 2561 +index_buffer: 2560 +index_buffer: 2625 +index_buffer: 2626 +index_buffer: 2564 +index_buffer: 2625 +index_buffer: 2564 +index_buffer: 2561 +index_buffer: 2538 +index_buffer: 2541 +index_buffer: 2384 +index_buffer: 2538 +index_buffer: 2384 +index_buffer: 2383 +index_buffer: 2384 +index_buffer: 2541 +index_buffer: 2546 +index_buffer: 2384 +index_buffer: 2546 +index_buffer: 2385 +index_buffer: 2550 +index_buffer: 2478 +index_buffer: 2391 +index_buffer: 2550 +index_buffer: 2391 +index_buffer: 2390 +index_buffer: 2551 +index_buffer: 2398 +index_buffer: 2399 +index_buffer: 2551 +index_buffer: 2399 +index_buffer: 2552 +index_buffer: 2559 +index_buffer: 2400 +index_buffer: 2398 +index_buffer: 2559 +index_buffer: 2398 +index_buffer: 2551 +index_buffer: 2563 +index_buffer: 2407 +index_buffer: 2408 +index_buffer: 2563 +index_buffer: 2408 +index_buffer: 2560 +index_buffer: 2409 +index_buffer: 2407 +index_buffer: 2563 +index_buffer: 2409 +index_buffer: 2563 +index_buffer: 2568 +index_buffer: 2559 +index_buffer: 2570 +index_buffer: 2416 +index_buffer: 2559 +index_buffer: 2416 +index_buffer: 2400 +index_buffer: 2416 +index_buffer: 2570 +index_buffer: 2574 +index_buffer: 2416 +index_buffer: 2574 +index_buffer: 2417 +index_buffer: 2417 +index_buffer: 2574 +index_buffer: 2578 +index_buffer: 2417 +index_buffer: 2578 +index_buffer: 2444 +index_buffer: 2444 +index_buffer: 2578 +index_buffer: 2582 +index_buffer: 2444 +index_buffer: 2582 +index_buffer: 2445 +index_buffer: 2445 +index_buffer: 2582 +index_buffer: 2586 +index_buffer: 2445 +index_buffer: 2586 +index_buffer: 2464 +index_buffer: 2464 +index_buffer: 2586 +index_buffer: 2590 +index_buffer: 2464 +index_buffer: 2590 +index_buffer: 2465 +index_buffer: 2488 +index_buffer: 2487 +index_buffer: 2433 +index_buffer: 2488 +index_buffer: 2433 +index_buffer: 2592 +index_buffer: 2489 +index_buffer: 2488 +index_buffer: 2592 +index_buffer: 2489 +index_buffer: 2592 +index_buffer: 2597 +index_buffer: 2627 +index_buffer: 2508 +index_buffer: 2600 +index_buffer: 2627 +index_buffer: 2600 +index_buffer: 2599 +index_buffer: 2628 +index_buffer: 2489 +index_buffer: 2597 +index_buffer: 2628 +index_buffer: 2597 +index_buffer: 2604 +index_buffer: 2629 +index_buffer: 2628 +index_buffer: 2604 +index_buffer: 2629 +index_buffer: 2604 +index_buffer: 2609 +index_buffer: 2613 +index_buffer: 2612 +index_buffer: 2507 +index_buffer: 2613 +index_buffer: 2507 +index_buffer: 2506 +index_buffer: 2507 +index_buffer: 2612 +index_buffer: 2600 +index_buffer: 2507 +index_buffer: 2600 +index_buffer: 2508 +index_buffer: 2465 +index_buffer: 2590 +index_buffer: 2617 +index_buffer: 2465 +index_buffer: 2617 +index_buffer: 2516 +index_buffer: 2623 +index_buffer: 2622 +index_buffer: 2517 +index_buffer: 2623 +index_buffer: 2517 +index_buffer: 2518 +index_buffer: 2624 +index_buffer: 2560 +index_buffer: 2408 +index_buffer: 2624 +index_buffer: 2408 +index_buffer: 2530 +index_buffer: 2575 +index_buffer: 2487 +index_buffer: 2486 +index_buffer: 2575 +index_buffer: 2486 +index_buffer: 2630 
+index_buffer: 2486 +index_buffer: 2372 +index_buffer: 2411 +index_buffer: 2486 +index_buffer: 2411 +index_buffer: 2630 +index_buffer: 2412 +index_buffer: 2631 +index_buffer: 2630 +index_buffer: 2412 +index_buffer: 2630 +index_buffer: 2411 +index_buffer: 2631 +index_buffer: 2576 +index_buffer: 2575 +index_buffer: 2631 +index_buffer: 2575 +index_buffer: 2630 +index_buffer: 2583 +index_buffer: 2576 +index_buffer: 2631 +index_buffer: 2583 +index_buffer: 2631 +index_buffer: 2632 +index_buffer: 2632 +index_buffer: 2631 +index_buffer: 2412 +index_buffer: 2632 +index_buffer: 2412 +index_buffer: 2439 +index_buffer: 2633 +index_buffer: 2632 +index_buffer: 2439 +index_buffer: 2633 +index_buffer: 2439 +index_buffer: 2441 +index_buffer: 2584 +index_buffer: 2583 +index_buffer: 2632 +index_buffer: 2584 +index_buffer: 2632 +index_buffer: 2633 +index_buffer: 2591 +index_buffer: 2584 +index_buffer: 2633 +index_buffer: 2591 +index_buffer: 2633 +index_buffer: 2634 +index_buffer: 2633 +index_buffer: 2441 +index_buffer: 2460 +index_buffer: 2633 +index_buffer: 2460 +index_buffer: 2634 +index_buffer: 2460 +index_buffer: 2388 +index_buffer: 2387 +index_buffer: 2460 +index_buffer: 2387 +index_buffer: 2634 +index_buffer: 2391 +index_buffer: 2591 +index_buffer: 2634 +index_buffer: 2391 +index_buffer: 2634 +index_buffer: 2387 +index_buffer: 2525 +index_buffer: 2496 +index_buffer: 2382 +index_buffer: 2525 +index_buffer: 2382 +index_buffer: 2500 +index_buffer: 2527 +index_buffer: 2501 +index_buffer: 2504 +index_buffer: 2527 +index_buffer: 2504 +index_buffer: 2635 +index_buffer: 2635 +index_buffer: 2504 +index_buffer: 2508 +index_buffer: 2635 +index_buffer: 2508 +index_buffer: 2627 +index_buffer: 2636 +index_buffer: 2497 +index_buffer: 2496 +index_buffer: 2636 +index_buffer: 2496 +index_buffer: 2482 +index_buffer: 2636 +index_buffer: 2629 +index_buffer: 2498 +index_buffer: 2636 +index_buffer: 2498 +index_buffer: 2497 +index_buffer: 2493 +index_buffer: 2637 +index_buffer: 2524 +index_buffer: 2493 +index_buffer: 2524 +index_buffer: 2527 +index_buffer: 2637 +index_buffer: 2483 +index_buffer: 2525 +index_buffer: 2637 +index_buffer: 2525 +index_buffer: 2524 +index_buffer: 2602 +index_buffer: 2638 +index_buffer: 2639 +index_buffer: 2602 +index_buffer: 2639 +index_buffer: 2598 +index_buffer: 2598 +index_buffer: 2639 +index_buffer: 2640 +index_buffer: 2598 +index_buffer: 2640 +index_buffer: 2599 +index_buffer: 2539 +index_buffer: 2641 +index_buffer: 2642 +index_buffer: 2539 +index_buffer: 2642 +index_buffer: 2542 +index_buffer: 2538 +index_buffer: 2643 +index_buffer: 2641 +index_buffer: 2538 +index_buffer: 2641 +index_buffer: 2539 +index_buffer: 2627 +index_buffer: 2599 +index_buffer: 2640 +index_buffer: 2627 +index_buffer: 2640 +index_buffer: 2494 +index_buffer: 2538 +index_buffer: 2383 +index_buffer: 2498 +index_buffer: 2538 +index_buffer: 2498 +index_buffer: 2643 +index_buffer: 2483 +index_buffer: 2482 +index_buffer: 2496 +index_buffer: 2483 +index_buffer: 2496 +index_buffer: 2525 +index_buffer: 2527 +index_buffer: 2635 +index_buffer: 2492 +index_buffer: 2527 +index_buffer: 2492 +index_buffer: 2493 +index_buffer: 2635 +index_buffer: 2627 +index_buffer: 2494 +index_buffer: 2635 +index_buffer: 2494 +index_buffer: 2492 +index_buffer: 2608 +index_buffer: 2607 +index_buffer: 2642 +index_buffer: 2608 +index_buffer: 2642 +index_buffer: 2641 +index_buffer: 2643 +index_buffer: 2609 +index_buffer: 2608 +index_buffer: 2643 +index_buffer: 2608 +index_buffer: 2641 +index_buffer: 2609 +index_buffer: 2643 +index_buffer: 2498 +index_buffer: 
2609 +index_buffer: 2498 +index_buffer: 2629 +index_buffer: 2482 +index_buffer: 2480 +index_buffer: 2644 +index_buffer: 2482 +index_buffer: 2644 +index_buffer: 2636 +index_buffer: 2480 +index_buffer: 2376 +index_buffer: 2485 +index_buffer: 2480 +index_buffer: 2485 +index_buffer: 2644 +index_buffer: 2485 +index_buffer: 2489 +index_buffer: 2628 +index_buffer: 2485 +index_buffer: 2628 +index_buffer: 2644 +index_buffer: 2628 +index_buffer: 2629 +index_buffer: 2636 +index_buffer: 2628 +index_buffer: 2636 +index_buffer: 2644 +index_buffer: 2493 +index_buffer: 2490 +index_buffer: 2645 +index_buffer: 2493 +index_buffer: 2645 +index_buffer: 2637 +index_buffer: 2490 +index_buffer: 2396 +index_buffer: 2510 +index_buffer: 2490 +index_buffer: 2510 +index_buffer: 2645 +index_buffer: 2510 +index_buffer: 2375 +index_buffer: 2481 +index_buffer: 2510 +index_buffer: 2481 +index_buffer: 2645 +index_buffer: 2645 +index_buffer: 2481 +index_buffer: 2483 +index_buffer: 2645 +index_buffer: 2483 +index_buffer: 2637 +index_buffer: 2646 +index_buffer: 2639 +index_buffer: 2638 +index_buffer: 2646 +index_buffer: 2638 +index_buffer: 2647 +index_buffer: 2647 +index_buffer: 2556 +index_buffer: 2553 +index_buffer: 2647 +index_buffer: 2553 +index_buffer: 2646 +index_buffer: 2553 +index_buffer: 2552 +index_buffer: 2648 +index_buffer: 2553 +index_buffer: 2648 +index_buffer: 2646 +index_buffer: 2639 +index_buffer: 2646 +index_buffer: 2648 +index_buffer: 2639 +index_buffer: 2648 +index_buffer: 2640 +index_buffer: 2494 +index_buffer: 2640 +index_buffer: 2648 +index_buffer: 2494 +index_buffer: 2648 +index_buffer: 2495 +index_buffer: 2552 +index_buffer: 2399 +index_buffer: 2495 +index_buffer: 2552 +index_buffer: 2495 +index_buffer: 2648 +index_buffer: 2649 +index_buffer: 2650 +index_buffer: 2531 +index_buffer: 2649 +index_buffer: 2531 +index_buffer: 2424 +index_buffer: 2649 +index_buffer: 2424 +index_buffer: 2423 +index_buffer: 2649 +index_buffer: 2423 +index_buffer: 2651 +index_buffer: 2652 +index_buffer: 2649 +index_buffer: 2651 +index_buffer: 2652 +index_buffer: 2651 +index_buffer: 2653 +index_buffer: 2652 +index_buffer: 2654 +index_buffer: 2650 +index_buffer: 2652 +index_buffer: 2650 +index_buffer: 2649 +index_buffer: 2655 +index_buffer: 2653 +index_buffer: 2656 +index_buffer: 2655 +index_buffer: 2656 +index_buffer: 2657 +index_buffer: 2658 +index_buffer: 2659 +index_buffer: 2655 +index_buffer: 2658 +index_buffer: 2655 +index_buffer: 2657 +index_buffer: 2660 +index_buffer: 2661 +index_buffer: 2655 +index_buffer: 2660 +index_buffer: 2655 +index_buffer: 2659 +index_buffer: 2661 +index_buffer: 2652 +index_buffer: 2653 +index_buffer: 2661 +index_buffer: 2653 +index_buffer: 2655 +index_buffer: 2662 +index_buffer: 2660 +index_buffer: 2659 +index_buffer: 2662 +index_buffer: 2659 +index_buffer: 2663 +index_buffer: 2664 +index_buffer: 2665 +index_buffer: 2661 +index_buffer: 2664 +index_buffer: 2661 +index_buffer: 2660 +index_buffer: 2654 +index_buffer: 2652 +index_buffer: 2661 +index_buffer: 2654 +index_buffer: 2661 +index_buffer: 2665 +index_buffer: 2651 +index_buffer: 2423 +index_buffer: 2422 +index_buffer: 2651 +index_buffer: 2422 +index_buffer: 2666 +index_buffer: 2651 +index_buffer: 2666 +index_buffer: 2656 +index_buffer: 2651 +index_buffer: 2656 +index_buffer: 2653 +index_buffer: 2667 +index_buffer: 2662 +index_buffer: 2663 +index_buffer: 2667 +index_buffer: 2663 +index_buffer: 2668 +index_buffer: 2669 +index_buffer: 2670 +index_buffer: 2671 +index_buffer: 2669 +index_buffer: 2671 +index_buffer: 2672 +index_buffer: 2669 
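The `index_buffer` entries elided above are, by the usual mesh-asset convention, consumed three at a time as a triangle list: each consecutive triple of indices names the three vertices (from the accompanying vertex buffer) of one mesh face. A minimal Rust sketch of that interpretation, assuming the indices have already been extracted into a flat `Vec<u32>` and ignoring the surrounding protobuf container:

```rust
/// Interprets a flat index buffer as a triangle list: indices are read in
/// groups of three, each group naming the three vertices of one triangle.
fn triangles(index_buffer: &[u32]) -> impl Iterator<Item = [u32; 3]> + '_ {
    index_buffer.chunks_exact(3).map(|t| [t[0], t[1], t[2]])
}

fn main() {
    // A few indices taken from the buffer above: two triangles.
    let index_buffer = vec![2063, 2040, 2037, 2036, 2063, 2037];
    for [a, b, c] in triangles(&index_buffer) {
        println!("triangle with vertices {}, {}, {}", a, b, c);
    }
}
```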
diff --git a/mediapipe/graphs/face_effect/data/glasses.pngblob b/mediapipe/graphs/face_effect/data/glasses.pngblob
new file mode 100644
index 0000000..865584b
Binary files /dev/null and b/mediapipe/graphs/face_effect/data/glasses.pngblob differ
diff --git a/mediapipe/graphs/face_effect/face_effect_gpu.pbtxt b/mediapipe/graphs/face_effect/face_effect_gpu.pbtxt
new file mode 100644
index 0000000..40888d0
--- /dev/null
+++ b/mediapipe/graphs/face_effect/face_effect_gpu.pbtxt
@@ -0,0 +1,130 @@
+# MediaPipe graph that applies a face effect to the input video stream.
+
+# GPU buffer. (GpuBuffer)
+input_stream: "input_video"
+
+# An integer that indicates which effect is selected. (int)
+#
+# If `selected_effect_id` is `0`, the Axis effect is selected.
+# If `selected_effect_id` is `1`, the Facepaint effect is selected.
+# If `selected_effect_id` is `2`, the Glasses effect is selected.
+#
+# No other values are allowed for `selected_effect_id`.
+input_stream: "selected_effect_id"
+
+# Indicates whether to use face detection as the input source. (bool)
+#
+# If `true`, the face detection pipeline will be used to produce landmarks.
+# If `false`, the face landmark pipeline will be used to produce landmarks.
+input_side_packet: "use_face_detection_input_source"
+
+# Output image with rendered results. (GpuBuffer)
+output_stream: "output_video"
+
+# A list of geometry data for a single detected face.
+#
+# NOTE: there will not be an output packet in this stream for a particular
+# timestamp if no faces are detected.
+#
+# (std::vector<face_geometry::FaceGeometry>)
+output_stream: "multi_face_geometry"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that arrive while it is waiting are
+# dropped, limiting the number of in-flight images in most parts of the graph
+# to 1. This prevents the downstream nodes from queuing up incoming images and
+# data excessively, which would increase latency and memory usage, both
+# unwanted in real-time mobile applications. It also eliminates unnecessary
+# computation: e.g., the output produced by a node may get dropped downstream
+# if the subsequent nodes are still busy processing previous inputs.
+node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Generates an environment that describes the current virtual scene. +node { + calculator: "FaceGeometryEnvGeneratorCalculator" + output_side_packet: "ENVIRONMENT:environment" + node_options: { + [type.googleapis.com/mediapipe.FaceGeometryEnvGeneratorCalculatorOptions] { + environment: { + origin_point_location: TOP_LEFT_CORNER + perspective_camera: { + vertical_fov_degrees: 63.0 # 63 degrees + near: 1.0 # 1cm + far: 10000.0 # 100m + } + } + } + } +} + +# Computes the face geometry for a single face. The input source is defined +# through `use_face_detection_input_source`. +node { + calculator: "SwitchContainer" + input_stream: "IMAGE:throttled_input_video" + input_side_packet: "ENABLE:use_face_detection_input_source" + input_side_packet: "ENVIRONMENT:environment" + output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry" + node_options: { + [type.googleapis.com/mediapipe.SwitchContainerOptions] { + contained_node: { + calculator: "SingleFaceGeometryFromLandmarksGpu" + } + contained_node: { + calculator: "SingleFaceGeometryFromDetectionGpu" + } + } + } +} + +# Renders the selected effect based on `selected_effect_id`. +node { + calculator: "SwitchContainer" + input_stream: "SELECT:selected_effect_id" + input_stream: "IMAGE_GPU:throttled_input_video" + input_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry" + input_side_packet: "ENVIRONMENT:environment" + output_stream: "IMAGE_GPU:output_video" + node_options: { + [type.googleapis.com/mediapipe.SwitchContainerOptions] { + contained_node: { + calculator: "FaceGeometryEffectRendererCalculator" + node_options: { + [type.googleapis.com/mediapipe.FaceGeometryEffectRendererCalculatorOptions] { + effect_texture_path: "mediapipe/graphs/face_effect/data/axis.pngblob" + effect_mesh_3d_path: "mediapipe/graphs/face_effect/data/axis.binarypb" + } + } + } + contained_node: { + calculator: "FaceGeometryEffectRendererCalculator" + node_options: { + [type.googleapis.com/mediapipe.FaceGeometryEffectRendererCalculatorOptions] { + effect_texture_path: "mediapipe/graphs/face_effect/data/facepaint.pngblob" + } + } + } + contained_node: { + calculator: "FaceGeometryEffectRendererCalculator" + node_options: { + [type.googleapis.com/mediapipe.FaceGeometryEffectRendererCalculatorOptions] { + effect_texture_path: "mediapipe/graphs/face_effect/data/glasses.pngblob" + effect_mesh_3d_path: "mediapipe/graphs/face_effect/data/glasses.binarypb" + } + } + } + } + } +} + diff --git a/mediapipe/graphs/face_effect/subgraphs/BUILD b/mediapipe/graphs/face_effect/subgraphs/BUILD new file mode 100644 index 0000000..0b23ad5 --- /dev/null +++ b/mediapipe/graphs/face_effect/subgraphs/BUILD @@ -0,0 +1,61 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
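A note on the effect-rendering SwitchContainer above: the three contained FaceGeometryEffectRendererCalculator nodes are selected by `selected_effect_id` (0 = Axis, 1 = Facepaint, 2 = Glasses; Facepaint has a texture but no 3D mesh). A Rust sketch of that dispatch follows; the asset paths are the ones from the graph, while the types and function are hypothetical.

```rust
// Illustrative mapping from `selected_effect_id` to the effect assets wired
// into the three contained renderer nodes in the SwitchContainer above.
struct EffectAssets {
    texture: &'static str,
    mesh_3d: Option<&'static str>, // the Facepaint effect has no 3D mesh
}

fn select_effect(selected_effect_id: i32) -> Option<EffectAssets> {
    match selected_effect_id {
        0 => Some(EffectAssets {
            texture: "mediapipe/graphs/face_effect/data/axis.pngblob",
            mesh_3d: Some("mediapipe/graphs/face_effect/data/axis.binarypb"),
        }),
        1 => Some(EffectAssets {
            texture: "mediapipe/graphs/face_effect/data/facepaint.pngblob",
            mesh_3d: None,
        }),
        2 => Some(EffectAssets {
            texture: "mediapipe/graphs/face_effect/data/glasses.pngblob",
            mesh_3d: Some("mediapipe/graphs/face_effect/data/glasses.binarypb"),
        }),
        _ => None, // no other values are allowed for `selected_effect_id`
    }
}

fn main() {
    if let Some(effect) = select_effect(2) {
        println!("texture: {}, mesh: {:?}", effect.texture, effect.mesh_3d);
    }
}
```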
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "face_landmarks_smoothing", + graph = "face_landmarks_smoothing.pbtxt", + register_as = "FaceLandmarksSmoothing", + deps = [ + "//mediapipe/calculators/util:landmarks_smoothing_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "single_face_geometry_from_detection_gpu", + graph = "single_face_geometry_from_detection_gpu.pbtxt", + register_as = "SingleFaceGeometryFromDetectionGpu", + deps = [ + ":face_landmarks_smoothing", + "//mediapipe/calculators/core:concatenate_detection_vector_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/modules/face_detection:face_detection_short_range_gpu", + "//mediapipe/modules/face_geometry:face_geometry_from_detection", + ], +) + +mediapipe_simple_subgraph( + name = "single_face_geometry_from_landmarks_gpu", + graph = "single_face_geometry_from_landmarks_gpu.pbtxt", + register_as = "SingleFaceGeometryFromLandmarksGpu", + deps = [ + ":face_landmarks_smoothing", + "//mediapipe/calculators/core:concatenate_vector_calculator", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/util:landmarks_smoothing_calculator", + "//mediapipe/modules/face_geometry:face_geometry_from_landmarks", + "//mediapipe/modules/face_landmark:face_landmark_front_gpu", + ], +) diff --git a/mediapipe/graphs/face_effect/subgraphs/face_landmarks_smoothing.pbtxt b/mediapipe/graphs/face_effect/subgraphs/face_landmarks_smoothing.pbtxt new file mode 100644 index 0000000..3f565f5 --- /dev/null +++ b/mediapipe/graphs/face_effect/subgraphs/face_landmarks_smoothing.pbtxt @@ -0,0 +1,24 @@ +# MediaPipe subgraph that smoothes face landmarks. + +type: "FaceLandmarksSmoothing" + +input_stream: "NORM_LANDMARKS:landmarks" +input_stream: "IMAGE_SIZE:input_image_size" +output_stream: "NORM_FILTERED_LANDMARKS:filtered_landmarks" + +# Applies smoothing to a face landmark list. The filter options were handpicked +# to achieve better visual results. +node { + calculator: "LandmarksSmoothingCalculator" + input_stream: "NORM_LANDMARKS:landmarks" + input_stream: "IMAGE_SIZE:input_image_size" + output_stream: "NORM_FILTERED_LANDMARKS:filtered_landmarks" + node_options: { + [type.googleapis.com/mediapipe.LandmarksSmoothingCalculatorOptions] { + velocity_filter: { + window_size: 5 + velocity_scale: 20.0 + } + } + } +} diff --git a/mediapipe/graphs/face_effect/subgraphs/single_face_geometry_from_detection_gpu.pbtxt b/mediapipe/graphs/face_effect/subgraphs/single_face_geometry_from_detection_gpu.pbtxt new file mode 100644 index 0000000..bce72c1 --- /dev/null +++ b/mediapipe/graphs/face_effect/subgraphs/single_face_geometry_from_detection_gpu.pbtxt @@ -0,0 +1,91 @@ +# MediaPipe subgraph that extracts geometry from a single face using the face +# landmark pipeline on an input GPU image. The face landmarks are also +# "smoothed" to achieve better visual results. + +type: "SingleFaceGeometryFromDetectionGpu" + +# GPU image. (GpuBuffer) +input_stream: "IMAGE:input_image" + +# Environment that describes the current virtual scene. 
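The `velocity_filter` options in the smoothing subgraph above (`window_size: 5`, `velocity_scale: 20.0`) trade smoothing strength against lag: slow-moving landmarks are smoothed heavily, fast-moving ones track the raw input. The Rust sketch below is a rough stand-in for that idea under those assumptions; it is not MediaPipe's exact RelativeVelocityFilter.

```rust
use std::collections::VecDeque;

// Simplified stand-in for a velocity-scaled low-pass filter: higher recent
// velocity -> alpha closer to 1 -> output follows the raw input more closely.
struct VelocitySmoother {
    window: VecDeque<f32>, // recent per-frame displacements
    window_size: usize,
    velocity_scale: f32,
    last: Option<f32>,
}

impl VelocitySmoother {
    fn new(window_size: usize, velocity_scale: f32) -> Self {
        VelocitySmoother { window: VecDeque::new(), window_size, velocity_scale, last: None }
    }

    fn filter(&mut self, x: f32) -> f32 {
        let prev = match self.last {
            Some(p) => p,
            None => {
                self.last = Some(x);
                return x; // first sample passes through unfiltered
            }
        };
        self.window.push_back((x - prev).abs());
        if self.window.len() > self.window_size {
            self.window.pop_front();
        }
        let mean_velocity: f32 =
            self.window.iter().sum::<f32>() / self.window.len() as f32;
        let alpha = (mean_velocity * self.velocity_scale).clamp(0.1, 1.0);
        let smoothed = prev + alpha * (x - prev);
        self.last = Some(smoothed);
        smoothed
    }
}

fn main() {
    let mut s = VelocitySmoother::new(5, 20.0);
    for x in [0.50, 0.51, 0.50, 0.90, 0.95] {
        println!("{:.3}", s.filter(x));
    }
}
```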
+# (face_geometry::Environment) +input_side_packet: "ENVIRONMENT:environment" + +# A list of geometry data for a single detected face. The size of this +# collection is at most 1 because of the single-face use in this graph. +# (std::vector) +# +# NOTE: if no face is detected at a particular timestamp, there will not be an +# output packet in the `MULTI_FACE_GEOMETRY` stream for this timestamp. However, +# the MediaPipe framework will internally inform the downstream calculators of +# the absence of this packet so that they don't wait for it unnecessarily. +output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry" + +# Subgraph that detects faces and corresponding landmarks using the face +# detection pipeline. +node { + calculator: "FaceDetectionShortRangeGpu" + input_stream: "IMAGE:input_image" + output_stream: "DETECTIONS:multi_face_detection" +} + +# Extracts the first face detection associated with the most prominent face from +# a collection. +node { + calculator: "SplitDetectionVectorCalculator" + input_stream: "multi_face_detection" + output_stream: "face_detection" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 1 } + element_only: true + } + } +} + +# Extracts face detection keypoints as a normalized landmarks. +node { + calculator: "DetectionToLandmarksCalculator" + input_stream: "DETECTION:face_detection" + output_stream: "LANDMARKS:face_landmarks" +} + +# Extracts the input image frame dimensions as a separate packet. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:input_image" + output_stream: "SIZE:input_image_size" +} + +# Applies smoothing to the face landmarks previously extracted from the face +# detection keypoints. +node { + calculator: "FaceLandmarksSmoothing" + input_stream: "NORM_LANDMARKS:face_landmarks" + input_stream: "IMAGE_SIZE:input_image_size" + output_stream: "NORM_FILTERED_LANDMARKS:smoothed_face_landmarks" +} + +# Converts smoothed face landmarks back into the detection format. +node { + calculator: "LandmarksToDetectionCalculator" + input_stream: "NORM_LANDMARKS:smoothed_face_landmarks" + output_stream: "DETECTION:smoothed_face_detection" +} + +# Puts the smoothed single face detection back into a collection to simplify +# passing the result into the `FaceGeometryFromDetection` subgraph. +node { + calculator: "ConcatenateDetectionVectorCalculator" + input_stream: "smoothed_face_detection" + output_stream: "multi_smoothed_face_detection" +} + +# Computes face geometry from the single face detection. +node { + calculator: "FaceGeometryFromDetection" + input_stream: "MULTI_FACE_DETECTION:multi_smoothed_face_detection" + input_stream: "IMAGE_SIZE:input_image_size" + input_side_packet: "ENVIRONMENT:environment" + output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry" +} diff --git a/mediapipe/graphs/face_effect/subgraphs/single_face_geometry_from_landmarks_gpu.pbtxt b/mediapipe/graphs/face_effect/subgraphs/single_face_geometry_from_landmarks_gpu.pbtxt new file mode 100644 index 0000000..364e386 --- /dev/null +++ b/mediapipe/graphs/face_effect/subgraphs/single_face_geometry_from_landmarks_gpu.pbtxt @@ -0,0 +1,89 @@ +# MediaPipe subgraph that extracts geometry from a single face using the face +# landmark pipeline on an input GPU image. The face landmarks are also +# "smoothed" to achieve better visual results. + +type: "SingleFaceGeometryFromLandmarksGpu" + +# GPU image. 
(GpuBuffer) +input_stream: "IMAGE:input_image" + +# Environment that describes the current virtual scene. +# (face_geometry::Environment) +input_side_packet: "ENVIRONMENT:environment" + +# A list of geometry data for a single detected face. The size of this +# collection is at most 1 because of the single-face use in this graph. +# (std::vector) +# +# NOTE: if no face is detected at a particular timestamp, there will not be an +# output packet in the `MULTI_FACE_GEOMETRY` stream for this timestamp. However, +# the MediaPipe framework will internally inform the downstream calculators of +# the absence of this packet so that they don't wait for it unnecessarily. +output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry" + +# Creates a packet to inform the `FaceLandmarkFrontGpu` subgraph to detect at +# most 1 face. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:num_faces" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { int_value: 1 } + } + } +} + +# Subgraph that detects faces and corresponding landmarks using the face +# landmark pipeline. +node { + calculator: "FaceLandmarkFrontGpu" + input_stream: "IMAGE:input_image" + input_side_packet: "NUM_FACES:num_faces" + output_stream: "LANDMARKS:multi_face_landmarks" +} + +# Extracts a single set of face landmarks associated with the most prominent +# face detected from a collection. +node { + calculator: "SplitNormalizedLandmarkListVectorCalculator" + input_stream: "multi_face_landmarks" + output_stream: "face_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 1 } + element_only: true + } + } +} + +# Extracts the input image frame dimensions as a separate packet. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:input_image" + output_stream: "SIZE:input_image_size" +} + +# Applies smoothing to the single set of face landmarks. +node { + calculator: "FaceLandmarksSmoothing" + input_stream: "NORM_LANDMARKS:face_landmarks" + input_stream: "IMAGE_SIZE:input_image_size" + output_stream: "NORM_FILTERED_LANDMARKS:smoothed_face_landmarks" +} + +# Puts the single set of smoothed landmarks back into a collection to simplify +# passing the result into the `FaceGeometryFromLandmarks` subgraph. +node { + calculator: "ConcatenateLandmarListVectorCalculator" + input_stream: "smoothed_face_landmarks" + output_stream: "multi_smoothed_face_landmarks" +} + +# Computes face geometry from face landmarks for a single face. +node { + calculator: "FaceGeometryFromLandmarks" + input_stream: "MULTI_FACE_LANDMARKS:multi_smoothed_face_landmarks" + input_stream: "IMAGE_SIZE:input_image_size" + input_side_packet: "ENVIRONMENT:environment" + output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry" +} diff --git a/mediapipe/graphs/face_mesh/BUILD b/mediapipe/graphs/face_mesh/BUILD new file mode 100644 index 0000000..6926fda --- /dev/null +++ b/mediapipe/graphs/face_mesh/BUILD @@ -0,0 +1,69 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "desktop_calculators", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + "//mediapipe/calculators/video:opencv_video_encoder_calculator", + "//mediapipe/graphs/face_mesh/subgraphs:face_renderer_cpu", + "//mediapipe/modules/face_landmark:face_landmark_front_cpu", + ], +) + +cc_library( + name = "desktop_live_calculators", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/graphs/face_mesh/subgraphs:face_renderer_cpu", + "//mediapipe/modules/face_landmark:face_landmark_front_cpu", + ], +) + +cc_library( + name = "desktop_live_gpu_calculators", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/graphs/face_mesh/subgraphs:face_renderer_gpu", + "//mediapipe/modules/face_landmark:face_landmark_front_gpu", + ], +) + +cc_library( + name = "mobile_calculators", + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/graphs/face_mesh/subgraphs:face_renderer_gpu", + "//mediapipe/modules/face_landmark:face_landmark_front_gpu", + ], +) + +mediapipe_binary_graph( + name = "face_mesh_mobile_gpu_binary_graph", + graph = "face_mesh_mobile.pbtxt", + output_name = "face_mesh_mobile_gpu.binarypb", + deps = [":mobile_calculators"], +) diff --git a/mediapipe/graphs/face_mesh/calculators/BUILD b/mediapipe/graphs/face_mesh/calculators/BUILD new file mode 100644 index 0000000..3bebfc9 --- /dev/null +++ b/mediapipe/graphs/face_mesh/calculators/BUILD @@ -0,0 +1,37 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "face_landmarks_to_render_data_calculator", + srcs = ["face_landmarks_to_render_data_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_options_cc_proto", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:location_data_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/util:color_cc_proto", + "//mediapipe/util:render_data_cc_proto", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) diff --git a/mediapipe/graphs/face_mesh/calculators/face_landmarks_to_render_data_calculator.cc b/mediapipe/graphs/face_mesh/calculators/face_landmarks_to_render_data_calculator.cc new file mode 100644 index 0000000..093a732 --- /dev/null +++ b/mediapipe/graphs/face_mesh/calculators/face_landmarks_to_render_data_calculator.cc @@ -0,0 +1,104 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "absl/memory/memory.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/str_join.h" +#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.h" +#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/calculator_options.pb.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/location_data.pb.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/util/color.pb.h" +#include "mediapipe/util/render_data.pb.h" +namespace mediapipe { + +namespace { + +constexpr int kNumFaceLandmarkConnections = 132; +// Pairs of landmark indices to be rendered with connections. +constexpr int kFaceLandmarkConnections[] = { + // Lips. + 61, 146, 146, 91, 91, 181, 181, 84, 84, 17, 17, 314, 314, 405, 405, 321, + 321, 375, 375, 291, 61, 185, 185, 40, 40, 39, 39, 37, 37, 0, 0, 267, 267, + 269, 269, 270, 270, 409, 409, 291, 78, 95, 95, 88, 88, 178, 178, 87, 87, 14, + 14, 317, 317, 402, 402, 318, 318, 324, 324, 308, 78, 191, 191, 80, 80, 81, + 81, 82, 82, 13, 13, 312, 312, 311, 311, 310, 310, 415, 415, 308, + // Left eye. + 33, 7, 7, 163, 163, 144, 144, 145, 145, 153, 153, 154, 154, 155, 155, 133, + 33, 246, 246, 161, 161, 160, 160, 159, 159, 158, 158, 157, 157, 173, 173, + 133, + // Left eyebrow. + 46, 53, 53, 52, 52, 65, 65, 55, 70, 63, 63, 105, 105, 66, 66, 107, + // Left iris. + 474, 475, 475, 476, 476, 477, 477, 474, + // Right eye. + 263, 249, 249, 390, 390, 373, 373, 374, 374, 380, 380, 381, 381, 382, 382, + 362, 263, 466, 466, 388, 388, 387, 387, 386, 386, 385, 385, 384, 384, 398, + 398, 362, + // Right eyebrow. 
+ 276, 283, 283, 282, 282, 295, 295, 285, 300, 293, 293, 334, 334, 296, 296, + 336, + // Right iris. + 469, 470, 470, 471, 471, 472, 472, 469, + // Face oval. + 10, 338, 338, 297, 297, 332, 332, 284, 284, 251, 251, 389, 389, 356, 356, + 454, 454, 323, 323, 361, 361, 288, 288, 397, 397, 365, 365, 379, 379, 378, + 378, 400, 400, 377, 377, 152, 152, 148, 148, 176, 176, 149, 149, 150, 150, + 136, 136, 172, 172, 58, 58, 132, 132, 93, 93, 234, 234, 127, 127, 162, 162, + 21, 21, 54, 54, 103, 103, 67, 67, 109, 109, 10}; + +} // namespace + +// A calculator that converts face landmarks to RenderData proto for +// visualization. Ignores landmark_connections specified in +// LandmarksToRenderDataCalculatorOptions, if any, and always uses a fixed set +// of landmark connections specific to face landmark (defined in +// kFaceLandmarkConnections[] above). +// +// Example config: +// node { +// calculator: "FaceLandmarksToRenderDataCalculator" +// input_stream: "NORM_LANDMARKS:landmarks" +// output_stream: "RENDER_DATA:render_data" +// options { +// [LandmarksToRenderDataCalculatorOptions.ext] { +// landmark_color { r: 0 g: 255 b: 0 } +// connection_color { r: 0 g: 255 b: 0 } +// thickness: 4.0 +// } +// } +// } +class FaceLandmarksToRenderDataCalculator + : public LandmarksToRenderDataCalculator { + public: + absl::Status Open(CalculatorContext* cc) override; +}; +REGISTER_CALCULATOR(FaceLandmarksToRenderDataCalculator); + +absl::Status FaceLandmarksToRenderDataCalculator::Open(CalculatorContext* cc) { + cc->SetOffset(TimestampDiff(0)); + options_ = cc->Options(); + + for (int i = 0; i < kNumFaceLandmarkConnections; ++i) { + landmark_connections_.push_back(kFaceLandmarkConnections[i * 2]); + landmark_connections_.push_back(kFaceLandmarkConnections[i * 2 + 1]); + } + + return absl::OkStatus(); +} + +} // namespace mediapipe diff --git a/mediapipe/graphs/face_mesh/face_mesh_desktop.pbtxt b/mediapipe/graphs/face_mesh/face_mesh_desktop.pbtxt new file mode 100644 index 0000000..215791a --- /dev/null +++ b/mediapipe/graphs/face_mesh/face_mesh_desktop.pbtxt @@ -0,0 +1,70 @@ +# MediaPipe graph that performs face mesh on desktop with TensorFlow Lite +# on CPU. + +# Path to the input video file. (string) +input_side_packet: "input_video_path" +# Path to the output video file. (string) +input_side_packet: "output_video_path" + +# max_queue_size limits the number of packets enqueued on any input stream +# by throttling inputs to the graph. This makes the graph only process one +# frame per time. +max_queue_size: 1 + +# Decodes an input video file into images and a video header. +node { + calculator: "OpenCvVideoDecoderCalculator" + input_side_packet: "INPUT_FILE_PATH:input_video_path" + output_stream: "VIDEO:input_video" + output_stream: "VIDEO_PRESTREAM:input_video_header" +} + +# Defines side packets for further use in the graph. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:0:num_faces" + output_side_packet: "PACKET:1:with_attention" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { int_value: 1 } + packet { bool_value: true } + } + } +} + +# Subgraph that detects faces and corresponding landmarks. 
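The C++ calculator above stores its 132 face-landmark connections as a flat array of 264 indices, which `Open()` consumes two at a time. The same pairing logic in Rust, using a short excerpt of the lips section from `kFaceLandmarkConnections` as sample data (function names here are ours):

```rust
// Mirror of the pairing loop in FaceLandmarksToRenderDataCalculator::Open():
// the flat array holds 2 * kNumFaceLandmarkConnections indices, consumed as
// (start, end) pairs.
fn expand_connections(flat: &[u16]) -> Vec<(u16, u16)> {
    assert!(flat.len() % 2 == 0, "connection list must hold index pairs");
    flat.chunks_exact(2).map(|p| (p[0], p[1])).collect()
}

fn main() {
    // First few lip connections from kFaceLandmarkConnections above.
    let lips_excerpt: [u16; 8] = [61, 146, 146, 91, 91, 181, 181, 84];
    for (start, end) in expand_connections(&lips_excerpt) {
        println!("draw landmark edge {} -> {}", start, end);
    }
}
```

The graph definition continues below.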
+node { + calculator: "FaceLandmarkFrontCpu" + input_stream: "IMAGE:input_video" + input_side_packet: "NUM_FACES:num_faces" + input_side_packet: "WITH_ATTENTION:with_attention" + output_stream: "LANDMARKS:multi_face_landmarks" + output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" + output_stream: "DETECTIONS:face_detections" + output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" +} + +# Subgraph that renders face-landmark annotation onto the input video. +node { + calculator: "FaceRendererCpu" + input_stream: "IMAGE:input_video" + input_stream: "LANDMARKS:multi_face_landmarks" + input_stream: "NORM_RECTS:face_rects_from_landmarks" + input_stream: "DETECTIONS:face_detections" + output_stream: "IMAGE:output_video" +} + +# Encodes the annotated images into a video file, adopting properties specified +# in the input video header, e.g., video framerate. +node { + calculator: "OpenCvVideoEncoderCalculator" + input_stream: "VIDEO:output_video" + input_stream: "VIDEO_PRESTREAM:input_video_header" + input_side_packet: "OUTPUT_FILE_PATH:output_video_path" + node_options: { + [type.googleapis.com/mediapipe.OpenCvVideoEncoderCalculatorOptions]: { + codec: "avc1" + video_format: "mp4" + } + } +} diff --git a/mediapipe/graphs/face_mesh/face_mesh_desktop_live.pbtxt b/mediapipe/graphs/face_mesh/face_mesh_desktop_live.pbtxt new file mode 100644 index 0000000..2cc5634 --- /dev/null +++ b/mediapipe/graphs/face_mesh/face_mesh_desktop_live.pbtxt @@ -0,0 +1,66 @@ +# MediaPipe graph that performs face mesh with TensorFlow Lite on CPU. + +# Input image. (ImageFrame) +input_stream: "input_video" + +# Output image with rendered results. (ImageFrame) +output_stream: "output_video" +# Collection of detected/processed faces, each represented as a list of +# landmarks. (std::vector) +output_stream: "multi_face_landmarks" + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for downstream nodes +# (calculators and subgraphs) in the graph to finish their tasks before it +# passes through another image. All images that come in while waiting are +# dropped, limiting the number of in-flight images in most part of the graph to +# 1. This prevents the downstream nodes from queuing up incoming images and data +# excessively, which leads to increased latency and memory usage, unwanted in +# real-time mobile applications. It also eliminates unnecessarily computation, +# e.g., the output produced by a node may get dropped downstream if the +# subsequent nodes are still busy processing previous inputs. +node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Defines side packets for further use in the graph. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:0:num_faces" + output_side_packet: "PACKET:1:with_attention" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { int_value: 1 } + packet { bool_value: true } + } + } +} + +# Subgraph that detects faces and corresponding landmarks. 
+node { + calculator: "FaceLandmarkFrontCpu" + input_stream: "IMAGE:throttled_input_video" + input_side_packet: "NUM_FACES:num_faces" + input_side_packet: "WITH_ATTENTION:with_attention" + output_stream: "LANDMARKS:multi_face_landmarks" + output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" + output_stream: "DETECTIONS:face_detections" + output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" +} + +# Subgraph that renders face-landmark annotation onto the input image. +node { + calculator: "FaceRendererCpu" + input_stream: "IMAGE:throttled_input_video" + input_stream: "LANDMARKS:multi_face_landmarks" + input_stream: "NORM_RECTS:face_rects_from_landmarks" + input_stream: "DETECTIONS:face_detections" + output_stream: "IMAGE:output_video" +} diff --git a/mediapipe/graphs/face_mesh/face_mesh_desktop_live_gpu.pbtxt b/mediapipe/graphs/face_mesh/face_mesh_desktop_live_gpu.pbtxt new file mode 100644 index 0000000..ae03709 --- /dev/null +++ b/mediapipe/graphs/face_mesh/face_mesh_desktop_live_gpu.pbtxt @@ -0,0 +1,66 @@ +# MediaPipe graph that performs face mesh with TensorFlow Lite on GPU. + +# Input image. (GpuBuffer) +input_stream: "input_video" + +# Output image with rendered results. (GpuBuffer) +output_stream: "output_video" +# Collection of detected/processed faces, each represented as a list of +# landmarks. (std::vector) +output_stream: "multi_face_landmarks" + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for downstream nodes +# (calculators and subgraphs) in the graph to finish their tasks before it +# passes through another image. All images that come in while waiting are +# dropped, limiting the number of in-flight images in most part of the graph to +# 1. This prevents the downstream nodes from queuing up incoming images and data +# excessively, which leads to increased latency and memory usage, unwanted in +# real-time mobile applications. It also eliminates unnecessarily computation, +# e.g., the output produced by a node may get dropped downstream if the +# subsequent nodes are still busy processing previous inputs. +node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Defines side packets for further use in the graph. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:0:num_faces" + output_side_packet: "PACKET:1:with_attention" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { int_value: 1 } + packet { bool_value: true } + } + } +} + +# Subgraph that detects faces and corresponding landmarks. +node { + calculator: "FaceLandmarkFrontGpu" + input_stream: "IMAGE:throttled_input_video" + input_side_packet: "NUM_FACES:num_faces" + input_side_packet: "WITH_ATTENTION:with_attention" + output_stream: "LANDMARKS:multi_face_landmarks" + output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" + output_stream: "DETECTIONS:face_detections" + output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" +} + +# Subgraph that renders face-landmark annotation onto the input image. 
+node { + calculator: "FaceRendererGpu" + input_stream: "IMAGE:throttled_input_video" + input_stream: "LANDMARKS:multi_face_landmarks" + input_stream: "NORM_RECTS:face_rects_from_landmarks" + input_stream: "DETECTIONS:face_detections" + output_stream: "IMAGE:output_video" +} diff --git a/mediapipe/graphs/face_mesh/face_mesh_mobile.pbtxt b/mediapipe/graphs/face_mesh/face_mesh_mobile.pbtxt new file mode 100644 index 0000000..e9711e1 --- /dev/null +++ b/mediapipe/graphs/face_mesh/face_mesh_mobile.pbtxt @@ -0,0 +1,67 @@ +# MediaPipe graph that performs face mesh with TensorFlow Lite on GPU. + +# GPU buffer. (GpuBuffer) +input_stream: "input_video" + +# Max number of faces to detect/process. (int) +input_side_packet: "num_faces" + +# Output image with rendered results. (GpuBuffer) +output_stream: "output_video" +# Collection of detected/processed faces, each represented as a list of +# landmarks. (std::vector) +output_stream: "multi_face_landmarks" + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for downstream nodes +# (calculators and subgraphs) in the graph to finish their tasks before it +# passes through another image. All images that come in while waiting are +# dropped, limiting the number of in-flight images in most part of the graph to +# 1. This prevents the downstream nodes from queuing up incoming images and data +# excessively, which leads to increased latency and memory usage, unwanted in +# real-time mobile applications. It also eliminates unnecessarily computation, +# e.g., the output produced by a node may get dropped downstream if the +# subsequent nodes are still busy processing previous inputs. +node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Defines side packets for further use in the graph. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:with_attention" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { bool_value: true } + } + } +} + +# Subgraph that detects faces and corresponding landmarks. +node { + calculator: "FaceLandmarkFrontGpu" + input_stream: "IMAGE:throttled_input_video" + input_side_packet: "NUM_FACES:num_faces" + input_side_packet: "WITH_ATTENTION:with_attention" + output_stream: "LANDMARKS:multi_face_landmarks" + output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" + output_stream: "DETECTIONS:face_detections" + output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" +} + +# Subgraph that renders face-landmark annotation onto the input image. +node { + calculator: "FaceRendererGpu" + input_stream: "IMAGE:throttled_input_video" + input_stream: "LANDMARKS:multi_face_landmarks" + input_stream: "NORM_RECTS:face_rects_from_landmarks" + input_stream: "DETECTIONS:face_detections" + output_stream: "IMAGE:output_video" +} diff --git a/mediapipe/graphs/face_mesh/subgraphs/BUILD b/mediapipe/graphs/face_mesh/subgraphs/BUILD new file mode 100644 index 0000000..fbb946d --- /dev/null +++ b/mediapipe/graphs/face_mesh/subgraphs/BUILD @@ -0,0 +1,52 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "renderer_calculators", + deps = [ + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_data_calculator", + "//mediapipe/graphs/face_mesh/calculators:face_landmarks_to_render_data_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_renderer_gpu", + graph = "face_renderer_gpu.pbtxt", + register_as = "FaceRendererGpu", + deps = [ + ":renderer_calculators", + ], +) + +mediapipe_simple_subgraph( + name = "face_renderer_cpu", + graph = "face_renderer_cpu.pbtxt", + register_as = "FaceRendererCpu", + deps = [ + ":renderer_calculators", + ], +) diff --git a/mediapipe/graphs/face_mesh/subgraphs/face_renderer_cpu.pbtxt b/mediapipe/graphs/face_mesh/subgraphs/face_renderer_cpu.pbtxt new file mode 100644 index 0000000..f5793f3 --- /dev/null +++ b/mediapipe/graphs/face_mesh/subgraphs/face_renderer_cpu.pbtxt @@ -0,0 +1,96 @@ +# MediaPipe face mesh rendering subgraph. + +type: "FaceRendererCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:input_image" +# Collection of detected/predicted faces, each represented as a list of +# landmarks. (std::vector<NormalizedLandmarkList>) +input_stream: "LANDMARKS:multi_face_landmarks" +# Regions of interest calculated based on face detections. +# (std::vector<NormalizedRect>) +input_stream: "NORM_RECTS:rects" +# Detected faces. (std::vector<Detection>) +input_stream: "DETECTIONS:detections" + +# CPU image with rendered data. (ImageFrame) +output_stream: "IMAGE:output_image" + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:input_image" + output_stream: "SIZE:image_size" +} + +# Converts detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:detections" + output_stream: "RENDER_DATA:detections_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +# Outputs each element of multi_face_landmarks at a fake timestamp for the rest +# of the graph to process. At the end of the loop, outputs the BATCH_END +# timestamp for downstream calculators to inform them that all elements in the +# vector have been processed. +node { + calculator: "BeginLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITERABLE:multi_face_landmarks" + output_stream: "ITEM:face_landmarks" + output_stream: "BATCH_END:landmark_timestamp" +} + +# Converts landmarks to drawing primitives for annotation overlay.
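The BeginLoop*/EndLoop* pair in this renderer unrolls the vector of per-face landmark lists, processes each element, and re-collects the results at the BATCH_END timestamp. A conceptual Rust equivalent, with placeholder types that are not part of this crate:

```rust
// Conceptual equivalent of the BeginLoop/EndLoop pattern: unroll the vector
// of per-face landmark lists, convert each to render data, then re-collect.
#[derive(Clone, Copy)]
struct Landmark {
    x: f32,
    y: f32,
}

struct RenderData {
    points: Vec<(f32, f32)>,
}

fn landmarks_to_render_data(face: &[Landmark]) -> RenderData {
    RenderData { points: face.iter().map(|l| (l.x, l.y)).collect() }
}

fn render_all_faces(multi_face_landmarks: &[Vec<Landmark>]) -> Vec<RenderData> {
    multi_face_landmarks
        .iter()
        .map(|face| landmarks_to_render_data(face))
        .collect() // plays the role of EndLoopRenderDataCalculator
}

fn main() {
    let faces = vec![
        vec![Landmark { x: 0.1, y: 0.2 }, Landmark { x: 0.3, y: 0.4 }],
        vec![Landmark { x: 0.5, y: 0.6 }],
    ];
    println!("rendered {} faces", render_all_faces(&faces).len());
}
```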
+node { + calculator: "FaceLandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:face_landmarks" + output_stream: "RENDER_DATA:landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 0 g: 255 b: 0 } + thickness: 2 + visualize_landmark_depth: false + } + } +} + +# Collects a RenderData object for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of RenderData at the BATCH_END +# timestamp. +node { + calculator: "EndLoopRenderDataCalculator" + input_stream: "ITEM:landmarks_render_data" + input_stream: "BATCH_END:landmark_timestamp" + output_stream: "ITERABLE:multi_face_landmarks_render_data" +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECTS:rects" + output_stream: "RENDER_DATA:rects_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_image" + input_stream: "detections_render_data" + input_stream: "VECTOR:0:multi_face_landmarks_render_data" + input_stream: "rects_render_data" + output_stream: "IMAGE:output_image" +} diff --git a/mediapipe/graphs/face_mesh/subgraphs/face_renderer_gpu.pbtxt b/mediapipe/graphs/face_mesh/subgraphs/face_renderer_gpu.pbtxt new file mode 100644 index 0000000..4e2b3f2 --- /dev/null +++ b/mediapipe/graphs/face_mesh/subgraphs/face_renderer_gpu.pbtxt @@ -0,0 +1,96 @@ +# MediaPipe face mesh rendering subgraph. + +type: "FaceRendererGpu" + +# GPU image. (GpuBuffer) +input_stream: "IMAGE:input_image" +# Collection of detected/predicted faces, each represented as a list of +# landmarks. (std::vector<NormalizedLandmarkList>) +input_stream: "LANDMARKS:multi_face_landmarks" +# Regions of interest calculated based on face detections. +# (std::vector<NormalizedRect>) +input_stream: "NORM_RECTS:rects" +# Detected faces. (std::vector<Detection>) +input_stream: "DETECTIONS:detections" + +# GPU image with rendered data. (GpuBuffer) +output_stream: "IMAGE:output_image" + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:input_image" + output_stream: "SIZE:image_size" +} + +# Converts detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:detections" + output_stream: "RENDER_DATA:detections_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +# Outputs each element of multi_face_landmarks at a fake timestamp for the rest +# of the graph to process. At the end of the loop, outputs the BATCH_END +# timestamp for downstream calculators to inform them that all elements in the +# vector have been processed. +node { + calculator: "BeginLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITERABLE:multi_face_landmarks" + output_stream: "ITEM:face_landmarks" + output_stream: "BATCH_END:end_timestamp" +} + +# Converts landmarks to drawing primitives for annotation overlay.
+node { + calculator: "FaceLandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:face_landmarks" + output_stream: "RENDER_DATA:landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 0 g: 255 b: 0 } + thickness: 2 + visualize_landmark_depth: false + } + } +} + +# Collects a RenderData object for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of RenderData at the BATCH_END +# timestamp. +node { + calculator: "EndLoopRenderDataCalculator" + input_stream: "ITEM:landmarks_render_data" + input_stream: "BATCH_END:end_timestamp" + output_stream: "ITERABLE:multi_face_landmarks_render_data" +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECTS:rects" + output_stream: "RENDER_DATA:rects_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE_GPU:input_image" + input_stream: "detections_render_data" + input_stream: "VECTOR:0:multi_face_landmarks_render_data" + input_stream: "rects_render_data" + output_stream: "IMAGE_GPU:output_image" +} diff --git a/mediapipe/graphs/hair_segmentation/BUILD b/mediapipe/graphs/hair_segmentation/BUILD new file mode 100644 index 0000000..b177726 --- /dev/null +++ b/mediapipe/graphs/hair_segmentation/BUILD @@ -0,0 +1,61 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "mobile_calculators", + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/image:recolor_calculator", + "//mediapipe/calculators/image:set_alpha_calculator", + "//mediapipe/calculators/tflite:tflite_converter_calculator", + "//mediapipe/calculators/tflite:tflite_custom_op_resolver_calculator", + "//mediapipe/calculators/tflite:tflite_inference_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_segmentation_calculator", + "//mediapipe/gpu:gpu_buffer_to_image_frame_calculator", + "//mediapipe/gpu:image_frame_to_gpu_buffer_calculator", + ], +) + +cc_library( + name = "desktop_calculators", + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/image:recolor_calculator", + "//mediapipe/calculators/image:set_alpha_calculator", + "//mediapipe/calculators/tflite:tflite_converter_calculator", + "//mediapipe/calculators/tflite:tflite_custom_op_resolver_calculator", + "//mediapipe/calculators/tflite:tflite_inference_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_segmentation_calculator", + ], +) + +mediapipe_binary_graph( + name = "mobile_gpu_binary_graph", + graph = "hair_segmentation_mobile_gpu.pbtxt", + output_name = "mobile_gpu.binarypb", + deps = [":mobile_calculators"], +) diff --git a/mediapipe/graphs/hair_segmentation/hair_segmentation_desktop_live.pbtxt b/mediapipe/graphs/hair_segmentation/hair_segmentation_desktop_live.pbtxt new file mode 100644 index 0000000..36c6970 --- /dev/null +++ b/mediapipe/graphs/hair_segmentation/hair_segmentation_desktop_live.pbtxt @@ -0,0 +1,152 @@ +# MediaPipe graph that performs hair segmentation with TensorFlow Lite on CPU. +# Used in the example in +# mediapipe/examples/desktop/hair_segmentation:hair_segmentation_cpu + +# Images on CPU coming into and out of the graph. +input_stream: "input_video" +output_stream: "output_video" + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for +# TfLiteTensorsToSegmentationCalculator downstream in the graph to finish +# generating the corresponding hair mask before it passes through another +# image. All images that come in while waiting are dropped, limiting the number +# of in-flight images between this calculator and +# TfLiteTensorsToSegmentationCalculator to 1. This prevents the nodes in between +# from queuing up incoming images and data excessively, which leads to increased +# latency and memory usage, unwanted in real-time mobile applications. It also +# eliminates unnecessary computation, e.g., a transformed image produced by +# ImageTransformationCalculator may get dropped downstream if the subsequent +# TfLiteConverterCalculator or TfLiteInferenceCalculator is still busy +# processing previous inputs.
+node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:hair_mask" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Transforms the input image on CPU to a 512x512 image. To scale the image, by +# default it uses the STRETCH scale mode that maps the entire input image to the +# entire transformed image. As a result, image aspect ratio may be changed and +# objects in the image may be deformed (stretched or squeezed), but the hair +# segmentation model used in this graph is agnostic to that deformation. +node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE:throttled_input_video" + output_stream: "IMAGE:transformed_input_video" + node_options: { + [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] { + output_width: 512 + output_height: 512 + } + } +} + +# Caches a mask fed back from the previous round of hair segmentation, and upon +# the arrival of the next input image sends out the cached mask with the +# timestamp replaced by that of the input image, essentially generating a packet +# that carries the previous mask. Note that upon the arrival of the very first +# input image, an empty packet is sent out to jump start the feedback loop. +node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:throttled_input_video" + input_stream: "LOOP:hair_mask" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:previous_hair_mask" +} + +# Embeds the hair mask generated from the previous round of hair segmentation +# as the alpha channel of the current input image. +node { + calculator: "SetAlphaCalculator" + input_stream: "IMAGE:transformed_input_video" + input_stream: "ALPHA:previous_hair_mask" + output_stream: "IMAGE:mask_embedded_input_video" +} + +# Converts the transformed input image on CPU into an image tensor stored in +# TfLiteTensor. The zero_center option is set to false to normalize the +# pixel values to [0.f, 1.f] as opposed to [-1.f, 1.f]. With the +# max_num_channels option set to 4, all 4 RGBA channels are contained in the +# image tensor. +node { + calculator: "TfLiteConverterCalculator" + input_stream: "IMAGE:mask_embedded_input_video" + output_stream: "TENSORS:image_tensor" + node_options: { + [type.googleapis.com/mediapipe.TfLiteConverterCalculatorOptions] { + zero_center: false + max_num_channels: 4 + } + } +} + +# Generates a single side packet containing a TensorFlow Lite op resolver that +# supports custom ops needed by the model used in this graph. +node { + calculator: "TfLiteCustomOpResolverCalculator" + output_side_packet: "op_resolver" + node_options: { + [type.googleapis.com/mediapipe.TfLiteCustomOpResolverCalculatorOptions] { + use_gpu: false + } + } +} + +# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a +# tensor representing the hair segmentation, which has the same width and height +# as the input image tensor. +node { + calculator: "TfLiteInferenceCalculator" + input_stream: "TENSORS:image_tensor" + output_stream: "TENSORS:segmentation_tensor" + input_side_packet: "CUSTOM_OP_RESOLVER:op_resolver" + node_options: { + [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] { + model_path: "mediapipe/models/hair_segmentation.tflite" + use_gpu: false + } + } +} + +# Decodes the segmentation tensor generated by the TensorFlow Lite model into a +# mask of values in [0, 255], stored in a CPU buffer. 
It also +# takes the mask generated previously as another input to improve the temporal +# consistency. +node { + calculator: "TfLiteTensorsToSegmentationCalculator" + input_stream: "TENSORS:segmentation_tensor" + input_stream: "PREV_MASK:previous_hair_mask" + output_stream: "MASK:hair_mask" + node_options: { + [type.googleapis.com/mediapipe.TfLiteTensorsToSegmentationCalculatorOptions] { + tensor_width: 512 + tensor_height: 512 + tensor_channels: 2 + combine_with_previous_ratio: 0.9 + output_layer_index: 1 + } + } +} + +# Colors the hair segmentation with the color specified in the option. +node { + calculator: "RecolorCalculator" + input_stream: "IMAGE:throttled_input_video" + input_stream: "MASK:hair_mask" + output_stream: "IMAGE:output_video" + node_options: { + [type.googleapis.com/mediapipe.RecolorCalculatorOptions] { + color { r: 0 g: 0 b: 255 } + mask_channel: RED + } + } +} diff --git a/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt b/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt new file mode 100644 index 0000000..c8db44d --- /dev/null +++ b/mediapipe/graphs/hair_segmentation/hair_segmentation_mobile_gpu.pbtxt @@ -0,0 +1,152 @@ +# MediaPipe graph that performs hair segmentation with TensorFlow Lite on GPU. +# Used in the example in +# mediapipe/examples/android/src/java/com/mediapipe/apps/hairsegmentationgpu. + +# Images on GPU coming into and out of the graph. +input_stream: "input_video" +output_stream: "output_video" + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for +# TfLiteTensorsToSegmentationCalculator downstream in the graph to finish +# generating the corresponding hair mask before it passes through another +# image. All images that come in while waiting are dropped, limiting the number +# of in-flight images between this calculator and +# TfLiteTensorsToSegmentationCalculator to 1. This prevents the nodes in between +# from queuing up incoming images and data excessively, which leads to increased +# latency and memory usage, unwanted in real-time mobile applications. It also +# eliminates unnecessary computation, e.g., a transformed image produced by +# ImageTransformationCalculator may get dropped downstream if the subsequent +# TfLiteConverterCalculator or TfLiteInferenceCalculator is still busy +# processing previous inputs. +node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:hair_mask" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Transforms the input image on GPU to a 512x512 image. To scale the image, by +# default it uses the STRETCH scale mode that maps the entire input image to the +# entire transformed image. As a result, image aspect ratio may be changed and +# objects in the image may be deformed (stretched or squeezed), but the hair +# segmentation model used in this graph is agnostic to that deformation.
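The STRETCH scaling just described has a direct CPU-side analogue using the `opencv` crate this project already depends on: a plain resize to the model's input size, deliberately ignoring aspect ratio. A sketch under that assumption (the function name is ours, not crate API):

```rust
use opencv::{core, imgproc, prelude::*, Result};

// Rough CPU equivalent of the ImageTransformationCalculator step above:
// resize to 512x512 without preserving aspect ratio (STRETCH scale mode).
fn stretch_to_model_input(frame: &Mat) -> Result<Mat> {
    let mut resized = Mat::default();
    imgproc::resize(
        frame,
        &mut resized,
        core::Size::new(512, 512),
        0.0, // fx: ignored when dsize is given
        0.0, // fy: ignored when dsize is given
        imgproc::INTER_LINEAR,
    )?;
    Ok(resized)
}
```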
+node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE_GPU:throttled_input_video" + output_stream: "IMAGE_GPU:transformed_input_video" + node_options: { + [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] { + output_width: 512 + output_height: 512 + } + } +} + +# Caches a mask fed back from the previous round of hair segmentation, and upon +# the arrival of the next input image sends out the cached mask with the +# timestamp replaced by that of the input image, essentially generating a packet +# that carries the previous mask. Note that upon the arrival of the very first +# input image, an empty packet is sent out to jump start the feedback loop. +node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:throttled_input_video" + input_stream: "LOOP:hair_mask" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:previous_hair_mask" +} + +# Embeds the hair mask generated from the previous round of hair segmentation +# as the alpha channel of the current input image. +node { + calculator: "SetAlphaCalculator" + input_stream: "IMAGE_GPU:transformed_input_video" + input_stream: "ALPHA_GPU:previous_hair_mask" + output_stream: "IMAGE_GPU:mask_embedded_input_video" +} + +# Converts the transformed input image on GPU into an image tensor stored in +# tflite::gpu::GlBuffer. The zero_center option is set to false to normalize the +# pixel values to [0.f, 1.f] as opposed to [-1.f, 1.f]. With the +# max_num_channels option set to 4, all 4 RGBA channels are contained in the +# image tensor. +node { + calculator: "TfLiteConverterCalculator" + input_stream: "IMAGE_GPU:mask_embedded_input_video" + output_stream: "TENSORS_GPU:image_tensor" + node_options: { + [type.googleapis.com/mediapipe.TfLiteConverterCalculatorOptions] { + zero_center: false + max_num_channels: 4 + } + } +} + +# Generates a single side packet containing a TensorFlow Lite op resolver that +# supports custom ops needed by the model used in this graph. +node { + calculator: "TfLiteCustomOpResolverCalculator" + output_side_packet: "op_resolver" + node_options: { + [type.googleapis.com/mediapipe.TfLiteCustomOpResolverCalculatorOptions] { + use_gpu: true + } + } +} + +# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a +# tensor representing the hair segmentation, which has the same width and height +# as the input image tensor. +node { + calculator: "TfLiteInferenceCalculator" + input_stream: "TENSORS_GPU:image_tensor" + output_stream: "TENSORS_GPU:segmentation_tensor" + input_side_packet: "CUSTOM_OP_RESOLVER:op_resolver" + node_options: { + [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] { + model_path: "mediapipe/models/hair_segmentation.tflite" + use_gpu: true + } + } +} + +# Decodes the segmentation tensor generated by the TensorFlow Lite model into a +# mask of values in [0.f, 1.f], stored in the R channel of a GPU buffer. It also +# takes the mask generated previously as another input to improve the temporal +# consistency. 
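The temporal-consistency note above corresponds to the `combine_with_previous_ratio: 0.9` option in the segmentation node that follows. One plausible reading of that option, sketched in Rust; the calculator's exact formula may differ, so treat this as an illustration of the idea only.

```rust
// Sketch: blend the new mask with the cached previous mask to suppress
// frame-to-frame flicker. Masks are flat arrays of per-pixel confidences.
fn blend_masks(previous: &[f32], current: &[f32], previous_ratio: f32) -> Vec<f32> {
    assert_eq!(previous.len(), current.len());
    previous
        .iter()
        .zip(current)
        .map(|(p, c)| previous_ratio * p + (1.0 - previous_ratio) * c)
        .collect()
}

fn main() {
    let prev = [1.0, 0.8, 0.0];
    let curr = [0.0, 1.0, 0.2];
    // With previous_ratio = 0.9, the cached mask dominates the result.
    println!("{:?}", blend_masks(&prev, &curr, 0.9));
}
```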
+node { + calculator: "TfLiteTensorsToSegmentationCalculator" + input_stream: "TENSORS_GPU:segmentation_tensor" + input_stream: "PREV_MASK_GPU:previous_hair_mask" + output_stream: "MASK_GPU:hair_mask" + node_options: { + [type.googleapis.com/mediapipe.TfLiteTensorsToSegmentationCalculatorOptions] { + tensor_width: 512 + tensor_height: 512 + tensor_channels: 2 + combine_with_previous_ratio: 0.9 + output_layer_index: 1 + } + } +} + +# Colors the hair segmentation with the color specified in the option. +node { + calculator: "RecolorCalculator" + input_stream: "IMAGE_GPU:throttled_input_video" + input_stream: "MASK_GPU:hair_mask" + output_stream: "IMAGE_GPU:output_video" + node_options: { + [type.googleapis.com/mediapipe.RecolorCalculatorOptions] { + color { r: 0 g: 0 b: 255 } + mask_channel: RED + } + } +} diff --git a/mediapipe/graphs/hand_tracking/BUILD b/mediapipe/graphs/hand_tracking/BUILD new file mode 100644 index 0000000..71525bb --- /dev/null +++ b/mediapipe/graphs/hand_tracking/BUILD @@ -0,0 +1,91 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +exports_files(glob([ + "*.pbtxt", +])) + +cc_library( + name = "desktop_offline_calculators", + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:immediate_mux_calculator", + "//mediapipe/calculators/core:packet_inner_join_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + "//mediapipe/calculators/video:opencv_video_encoder_calculator", + ], +) + +cc_library( + name = "desktop_tflite_calculators", + deps = [ + ":desktop_offline_calculators", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:merge_calculator", + "//mediapipe/graphs/hand_tracking/subgraphs:hand_renderer_cpu", + "//mediapipe/modules/hand_landmark:hand_landmark_tracking_cpu", + ], +) + +mediapipe_binary_graph( + name = "hand_tracking_desktop_live_binary_graph", + graph = "hand_tracking_desktop_live.pbtxt", + output_name = "hand_tracking_desktop_live.binarypb", + deps = [":desktop_tflite_calculators"], +) + +cc_library( + name = "mobile_calculators", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/graphs/hand_tracking/subgraphs:hand_renderer_gpu", + "//mediapipe/modules/hand_landmark:hand_landmark_tracking_gpu", + ], +) + +mediapipe_binary_graph( + name = "hand_tracking_mobile_gpu_binary_graph", + graph = "hand_tracking_mobile.pbtxt", + output_name = "hand_tracking_mobile_gpu.binarypb", + deps = [":mobile_calculators"], +) + +cc_library( + name = "detection_mobile_calculators", + deps = [ + 
"//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/modules/palm_detection:palm_detection_gpu", + ], +) + +mediapipe_binary_graph( + name = "hand_detection_mobile_gpu_binary_graph", + graph = "hand_detection_mobile.pbtxt", + output_name = "hand_detection_mobile_gpu.binarypb", + deps = [":detection_mobile_calculators"], +) diff --git a/mediapipe/graphs/hand_tracking/calculators/BUILD b/mediapipe/graphs/hand_tracking/calculators/BUILD new file mode 100644 index 0000000..3d15861 --- /dev/null +++ b/mediapipe/graphs/hand_tracking/calculators/BUILD @@ -0,0 +1,17 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) diff --git a/mediapipe/graphs/hand_tracking/hand_detection_desktop.pbtxt b/mediapipe/graphs/hand_tracking/hand_detection_desktop.pbtxt new file mode 100644 index 0000000..3edcfe7 --- /dev/null +++ b/mediapipe/graphs/hand_tracking/hand_detection_desktop.pbtxt @@ -0,0 +1,61 @@ +# MediaPipe graph that performs hand detection on desktop with TensorFlow Lite +# on CPU. +# Used in the example in +# mediapipie/examples/desktop/hand_tracking:hand_detection_tflite. + +# max_queue_size limits the number of packets enqueued on any input stream +# by throttling inputs to the graph. This makes the graph only process one +# frame per time. +max_queue_size: 1 + +# Decodes an input video file into images and a video header. +node { + calculator: "OpenCvVideoDecoderCalculator" + input_side_packet: "INPUT_FILE_PATH:input_video_path" + output_stream: "VIDEO:input_video" + output_stream: "VIDEO_PRESTREAM:input_video_header" +} + +# Detects palms. +node { + calculator: "PalmDetectionCpu" + input_stream: "IMAGE:input_video" + output_stream: "DETECTIONS:output_detections" +} + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:output_detections" + output_stream: "RENDER_DATA:render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +# Draws annotations and overlays them on top of the original image coming into +# the graph. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_video" + input_stream: "render_data" + output_stream: "IMAGE:output_video" +} + +# Encodes the annotated images into a video file, adopting properties specified +# in the input video header, e.g., video framerate. 
+node { + calculator: "OpenCvVideoEncoderCalculator" + input_stream: "VIDEO:output_video" + input_stream: "VIDEO_PRESTREAM:input_video_header" + input_side_packet: "OUTPUT_FILE_PATH:output_video_path" + node_options: { + [type.googleapis.com/mediapipe.OpenCvVideoEncoderCalculatorOptions]: { + codec: "avc1" + video_format: "mp4" + } + } +} diff --git a/mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt b/mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt new file mode 100644 index 0000000..1bbd8bc --- /dev/null +++ b/mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt @@ -0,0 +1,39 @@ +# MediaPipe graph that performs hand detection on desktop with TensorFlow Lite +# on CPU. +# Used in the example in +# mediapipe/examples/desktop/hand_tracking:hand_detection_cpu. + +# CPU image. (ImageFrame) +input_stream: "input_video" + +# CPU image. (ImageFrame) +output_stream: "output_video" + +# Detects palms. +node { + calculator: "PalmDetectionCpu" + input_stream: "IMAGE:input_video" + output_stream: "DETECTIONS:output_detections" +} + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:output_detections" + output_stream: "RENDER_DATA:render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +# Draws annotations and overlays them on top of the original image coming into +# the graph. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_video" + input_stream: "render_data" + output_stream: "IMAGE:output_video" +} diff --git a/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt b/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt new file mode 100644 index 0000000..74ff5c5 --- /dev/null +++ b/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt @@ -0,0 +1,59 @@ +# MediaPipe graph that performs hand detection with TensorFlow Lite on GPU. +# Used in the examples in +# mediapipe/examples/android/src/java/com/mediapipe/apps/handdetectiongpu and +# mediapipe/examples/ios/handdetectiongpu. + +# GPU image. (GpuBuffer) +input_stream: "input_video" + +# GPU image. (GpuBuffer) +output_stream: "output_video" + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for PalmDetectionGpu +# downstream in the graph to finish its tasks before it passes through another +# image. All images that come in while waiting are dropped, limiting the number +# of in-flight images in PalmDetectionGpu to 1. This prevents the nodes in +# PalmDetectionGpu from queuing up incoming images and data excessively, which +# leads to increased latency and memory usage, unwanted in real-time mobile +# applications. It also eliminates unnecessarily computation, e.g., the output +# produced by a node in the subgraph may get dropped downstream if the +# subsequent nodes are still busy processing previous inputs. +node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Detects palms. +node { + calculator: "PalmDetectionGpu" + input_stream: "IMAGE:throttled_input_video" + output_stream: "DETECTIONS:palm_detections" +} + +# Converts detections to drawing primitives for annotation overlay. 
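This FlowLimiterCalculator pattern (a FINISHED back edge, at most one image in flight, late frames dropped) can be approximated outside MediaPipe with a bounded channel whose producer refuses to queue while the consumer is busy. A hedged Rust sketch of that policy; the channel capacity and timings here are illustrative and are not part of this crate:

```rust
use std::sync::mpsc::{sync_channel, TrySendError};
use std::thread;
use std::time::Duration;

fn main() {
    // Capacity 1: at most one frame waiting, roughly MediaPipe's
    // "one in-flight image" throttling described above.
    let (tx, rx) = sync_channel::<u32>(1);

    // Consumer: stands in for the PalmDetectionGpu subgraph.
    let worker = thread::spawn(move || {
        while let Ok(frame) = rx.recv() {
            thread::sleep(Duration::from_millis(30)); // pretend inference
            println!("processed frame {}", frame);
        }
    });

    // Producer: frames that arrive while the worker is busy are dropped,
    // mirroring how the flow limiter handles real-time input.
    for frame in 0..100u32 {
        match tx.try_send(frame) {
            Ok(()) => {}
            Err(TrySendError::Full(f)) => println!("dropped frame {}", f),
            Err(TrySendError::Disconnected(_)) => break,
        }
        thread::sleep(Duration::from_millis(10)); // camera pace
    }
    drop(tx);
    worker.join().unwrap();
}
```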
+
+# Converts detections to drawing primitives for annotation overlay.
+node {
+  calculator: "DetectionsToRenderDataCalculator"
+  input_stream: "DETECTIONS:palm_detections"
+  output_stream: "RENDER_DATA:detection_render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
+      thickness: 4.0
+      color { r: 0 g: 255 b: 0 }
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE_GPU:throttled_input_video"
+  input_stream: "detection_render_data"
+  output_stream: "IMAGE_GPU:output_video"
+}
diff --git a/mediapipe/graphs/hand_tracking/hand_tracking_desktop.pbtxt b/mediapipe/graphs/hand_tracking/hand_tracking_desktop.pbtxt
new file mode 100644
index 0000000..bc6e81c
--- /dev/null
+++ b/mediapipe/graphs/hand_tracking/hand_tracking_desktop.pbtxt
@@ -0,0 +1,68 @@
+# MediaPipe graph that performs hand tracking on desktop with TensorFlow Lite
+# on CPU.
+# Used in the example in
+# mediapipe/examples/desktop/hand_tracking:hand_tracking_tflite.
+
+# max_queue_size limits the number of packets enqueued on any input stream
+# by throttling inputs to the graph. This makes the graph process only one
+# frame at a time.
+max_queue_size: 1
+
+# Decodes an input video file into images and a video header.
+node {
+  calculator: "OpenCvVideoDecoderCalculator"
+  input_side_packet: "INPUT_FILE_PATH:input_video_path"
+  output_stream: "VIDEO:input_video"
+  output_stream: "VIDEO_PRESTREAM:input_video_header"
+}
+
+# Generates side packet containing max number of hands to detect/track.
+node {
+  calculator: "ConstantSidePacketCalculator"
+  output_side_packet: "PACKET:num_hands"
+  node_options: {
+    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
+      packet { int_value: 2 }
+    }
+  }
+}
+
+# Detects/tracks hand landmarks.
+node {
+  calculator: "HandLandmarkTrackingCpu"
+  input_stream: "IMAGE:input_video"
+  input_side_packet: "NUM_HANDS:num_hands"
+  output_stream: "LANDMARKS:landmarks"
+  output_stream: "HANDEDNESS:handedness"
+  output_stream: "PALM_DETECTIONS:multi_palm_detections"
+  output_stream: "HAND_ROIS_FROM_LANDMARKS:multi_hand_rects"
+  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:multi_palm_rects"
+}
+
+# Subgraph that renders annotations and overlays them on top of the input
+# images (see hand_renderer_cpu.pbtxt).
+node {
+  calculator: "HandRendererSubgraph"
+  input_stream: "IMAGE:input_video"
+  input_stream: "DETECTIONS:multi_palm_detections"
+  input_stream: "LANDMARKS:landmarks"
+  input_stream: "HANDEDNESS:handedness"
+  input_stream: "NORM_RECTS:0:multi_palm_rects"
+  input_stream: "NORM_RECTS:1:multi_hand_rects"
+  output_stream: "IMAGE:output_video"
+}
+
+# Encodes the annotated images into a video file, adopting properties specified
+# in the input video header, e.g., video framerate.
+node {
+  calculator: "OpenCvVideoEncoderCalculator"
+  input_stream: "VIDEO:output_video"
+  input_stream: "VIDEO_PRESTREAM:input_video_header"
+  input_side_packet: "OUTPUT_FILE_PATH:output_video_path"
+  node_options: {
+    [type.googleapis.com/mediapipe.OpenCvVideoEncoderCalculatorOptions]: {
+      codec: "avc1"
+      video_format: "mp4"
+    }
+  }
+}
diff --git a/mediapipe/graphs/hand_tracking/hand_tracking_desktop_live.pbtxt b/mediapipe/graphs/hand_tracking/hand_tracking_desktop_live.pbtxt
new file mode 100644
index 0000000..20de18f
--- /dev/null
+++ b/mediapipe/graphs/hand_tracking/hand_tracking_desktop_live.pbtxt
@@ -0,0 +1,46 @@
+# MediaPipe graph that performs hand tracking on desktop with TensorFlow
+# Lite on CPU.
+# Used in the example in
+# mediapipe/examples/desktop/hand_tracking:hand_tracking_cpu.
+
+# CPU image. (ImageFrame)
+input_stream: "input_video"
+
+# CPU image. (ImageFrame)
+output_stream: "output_video"
+
+# Generates side packet containing max number of hands to detect/track.
+node {
+  calculator: "ConstantSidePacketCalculator"
+  output_side_packet: "PACKET:num_hands"
+  node_options: {
+    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
+      packet { int_value: 2 }
+    }
+  }
+}
+
+# Detects/tracks hand landmarks.
+node {
+  calculator: "HandLandmarkTrackingCpu"
+  input_stream: "IMAGE:input_video"
+  input_side_packet: "NUM_HANDS:num_hands"
+  output_stream: "LANDMARKS:landmarks"
+  output_stream: "HANDEDNESS:handedness"
+  output_stream: "PALM_DETECTIONS:multi_palm_detections"
+  output_stream: "HAND_ROIS_FROM_LANDMARKS:multi_hand_rects"
+  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:multi_palm_rects"
+}
+
+# Subgraph that renders annotations and overlays them on top of the input
+# images (see hand_renderer_cpu.pbtxt).
+node {
+  calculator: "HandRendererSubgraph"
+  input_stream: "IMAGE:input_video"
+  input_stream: "DETECTIONS:multi_palm_detections"
+  input_stream: "LANDMARKS:landmarks"
+  input_stream: "HANDEDNESS:handedness"
+  input_stream: "NORM_RECTS:0:multi_palm_rects"
+  input_stream: "NORM_RECTS:1:multi_hand_rects"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/mediapipe/graphs/hand_tracking/hand_tracking_desktop_live_gpu.pbtxt b/mediapipe/graphs/hand_tracking/hand_tracking_desktop_live_gpu.pbtxt
new file mode 100644
index 0000000..4dcaac5
--- /dev/null
+++ b/mediapipe/graphs/hand_tracking/hand_tracking_desktop_live_gpu.pbtxt
@@ -0,0 +1,48 @@
+# MediaPipe graph that performs multi-hand tracking with TensorFlow Lite on GPU.
+# Used in the examples in
+# mediapipe/examples/android/src/java/com/mediapipe/apps/handtrackinggpu.
+
+# GPU image. (GpuBuffer)
+input_stream: "input_video"
+
+# GPU image. (GpuBuffer)
+output_stream: "output_video"
+# Collection of detected/predicted hands, each represented as a list of
+# landmarks. (std::vector)
+output_stream: "hand_landmarks"
+
+# Generates side packet containing max number of hands to detect/track.
+node {
+  calculator: "ConstantSidePacketCalculator"
+  output_side_packet: "PACKET:num_hands"
+  node_options: {
+    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
+      packet { int_value: 2 }
+    }
+  }
+}
+
+# Detects/tracks hand landmarks.
+node {
+  calculator: "HandLandmarkTrackingGpu"
+  input_stream: "IMAGE:input_video"
+  input_side_packet: "NUM_HANDS:num_hands"
+  output_stream: "LANDMARKS:hand_landmarks"
+  output_stream: "HANDEDNESS:handedness"
+  output_stream: "PALM_DETECTIONS:palm_detections"
+  output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects_from_landmarks"
+  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
+}
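The HAND_ROIS_FROM_LANDMARKS streams above carry one region of interest per hand, derived from the landmark positions. As a first approximation (the real subgraph also scales and rotates the box, so this is only a hedged sketch with stand-in types), such an ROI can be pictured as an axis-aligned bounding rectangle over the normalized landmarks:

```rust
#[derive(Debug, Clone, Copy)]
struct NormalizedLandmark { x: f32, y: f32 }

#[derive(Debug)]
struct NormalizedRect { x_center: f32, y_center: f32, width: f32, height: f32 }

/// Axis-aligned bounding box over normalized landmarks; None for an empty
/// list. The real tracking subgraph additionally expands and rotates this
/// box, so treat this as a first approximation only.
fn bounding_rect(landmarks: &[NormalizedLandmark]) -> Option<NormalizedRect> {
    let first = landmarks.first()?;
    let (mut min_x, mut max_x, mut min_y, mut max_y) = (first.x, first.x, first.y, first.y);
    for lm in &landmarks[1..] {
        min_x = min_x.min(lm.x);
        max_x = max_x.max(lm.x);
        min_y = min_y.min(lm.y);
        max_y = max_y.max(lm.y);
    }
    Some(NormalizedRect {
        x_center: (min_x + max_x) / 2.0,
        y_center: (min_y + max_y) / 2.0,
        width: max_x - min_x,
        height: max_y - min_y,
    })
}

fn main() {
    let hand = [
        NormalizedLandmark { x: 0.40, y: 0.50 },
        NormalizedLandmark { x: 0.55, y: 0.42 },
        NormalizedLandmark { x: 0.48, y: 0.61 },
    ];
    println!("{:?}", bounding_rect(&hand));
}
```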
+
+# Subgraph that renders annotations and overlays them on top of the input
+# images (see hand_renderer_gpu.pbtxt).
+node {
+  calculator: "HandRendererSubgraph"
+  input_stream: "IMAGE:input_video"
+  input_stream: "DETECTIONS:palm_detections"
+  input_stream: "LANDMARKS:hand_landmarks"
+  input_stream: "HANDEDNESS:handedness"
+  input_stream: "NORM_RECTS:0:hand_rects_from_palm_detections"
+  input_stream: "NORM_RECTS:1:hand_rects_from_landmarks"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt b/mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt
new file mode 100644
index 0000000..b47e2a4
--- /dev/null
+++ b/mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt
@@ -0,0 +1,65 @@
+# MediaPipe graph that performs multi-hand tracking with TensorFlow Lite on GPU.
+# Used in the examples in
+# mediapipe/examples/android/src/java/com/mediapipe/apps/handtrackinggpu.
+
+# GPU image. (GpuBuffer)
+input_stream: "input_video"
+
+# Max number of hands to detect/process. (int)
+input_side_packet: "num_hands"
+
+# Model complexity (0 or 1). (int)
+input_side_packet: "model_complexity"
+
+# GPU image. (GpuBuffer)
+output_stream: "output_video"
+# Collection of detected/predicted hands, each represented as a list of
+# landmarks. (std::vector)
+output_stream: "hand_landmarks"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that come in while waiting are
+# dropped, limiting the number of in-flight images in most part of the graph to
+# 1. This prevents the downstream nodes from queuing up incoming images and data
+# excessively, which leads to increased latency and memory usage, unwanted in
+# real-time mobile applications. It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Detects/tracks hand landmarks.
+node {
+  calculator: "HandLandmarkTrackingGpu"
+  input_stream: "IMAGE:throttled_input_video"
+  input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+  input_side_packet: "NUM_HANDS:num_hands"
+  output_stream: "LANDMARKS:hand_landmarks"
+  output_stream: "HANDEDNESS:handedness"
+  output_stream: "PALM_DETECTIONS:palm_detections"
+  output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects_from_landmarks"
+  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
+}
+
+# Subgraph that renders annotations and overlays them on top of the input
+# images (see hand_renderer_gpu.pbtxt).
+node {
+  calculator: "HandRendererSubgraph"
+  input_stream: "IMAGE:throttled_input_video"
+  input_stream: "DETECTIONS:palm_detections"
+  input_stream: "LANDMARKS:hand_landmarks"
+  input_stream: "HANDEDNESS:handedness"
+  input_stream: "NORM_RECTS:0:hand_rects_from_palm_detections"
+  input_stream: "NORM_RECTS:1:hand_rects_from_landmarks"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/mediapipe/graphs/hand_tracking/subgraphs/BUILD b/mediapipe/graphs/hand_tracking/subgraphs/BUILD
new file mode 100644
index 0000000..f16a6db
--- /dev/null
+++ b/mediapipe/graphs/hand_tracking/subgraphs/BUILD
@@ -0,0 +1,58 @@
+# Copyright 2019 The MediaPipe Authors.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "hand_renderer_cpu", + graph = "hand_renderer_cpu.pbtxt", + register_as = "HandRendererSubgraph", + deps = [ + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:collection_has_min_size_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:labels_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_data_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_renderer_gpu", + graph = "hand_renderer_gpu.pbtxt", + register_as = "HandRendererSubgraph", + deps = [ + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:collection_has_min_size_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:labels_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_data_calculator", + ], +) diff --git a/mediapipe/graphs/hand_tracking/subgraphs/hand_renderer_cpu.pbtxt b/mediapipe/graphs/hand_tracking/subgraphs/hand_renderer_cpu.pbtxt new file mode 100644 index 0000000..eed1388 --- /dev/null +++ b/mediapipe/graphs/hand_tracking/subgraphs/hand_renderer_cpu.pbtxt @@ -0,0 +1,209 @@ +# MediaPipe graph to render hand landmarks and some related debug information. + +type: "HandRendererSubgraph" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:input_image" +# Collection of detected/predicted hands, each represented as a list of +# landmarks. (std::vector) +input_stream: "LANDMARKS:multi_hand_landmarks" +# Handedness of the detected hand (i.e. is hand left or right). +# (std::vector) +input_stream: "HANDEDNESS:multi_handedness" +# Regions of interest calculated based on palm detections. +# (std::vector) +input_stream: "NORM_RECTS:0:multi_palm_rects" +# Regions of interest calculated based on landmarks. +# (std::vector) +input_stream: "NORM_RECTS:1:multi_hand_rects" +# Detected palms. (std::vector) +input_stream: "DETECTIONS:palm_detections" + +# Updated CPU image. (ImageFrame) +output_stream: "IMAGE:output_image" + +# Converts detections to drawing primitives for annotation overlay. 
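The NORM_RECTS:0 and NORM_RECTS:1 declarations above use MediaPipe's TAG:index:name stream syntax; the tag and index may be omitted, in which case the index defaults to 0. A small illustrative Rust parser for that spec format (not an API of this crate; the framework does this internally in C++):

```rust
/// Split a MediaPipe stream spec of the form "TAG:index:name", "TAG:name"
/// or plain "name" into its parts. Purely illustrative parsing.
fn parse_stream_spec(spec: &str) -> (Option<&str>, usize, &str) {
    let parts: Vec<&str> = spec.split(':').collect();
    match parts.as_slice() {
        &[name] => (None, 0, name),
        &[tag, name] => (Some(tag), 0, name),
        &[tag, index, name] => (Some(tag), index.parse().unwrap_or(0), name),
        _ => (None, 0, spec),
    }
}

fn main() {
    assert_eq!(parse_stream_spec("NORM_RECTS:1:multi_hand_rects"),
               (Some("NORM_RECTS"), 1, "multi_hand_rects"));
    assert_eq!(parse_stream_spec("IMAGE:input_image"),
               (Some("IMAGE"), 0, "input_image"));
    assert_eq!(parse_stream_spec("render_data"), (None, 0, "render_data"));
}
```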
+node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:palm_detections" + output_stream: "RENDER_DATA:detection_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECTS:multi_hand_rects" + output_stream: "RENDER_DATA:multi_hand_rects_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECTS:multi_palm_rects" + output_stream: "RENDER_DATA:multi_palm_rects_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 125 g: 0 b: 122 } + thickness: 4.0 + } + } +} + +# Outputs each element of multi_palm_landmarks at a fake timestamp for the rest +# of the graph to process. At the end of the loop, outputs the BATCH_END +# timestamp for downstream calculators to inform them that all elements in the +# vector have been processed. +node { + calculator: "BeginLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITERABLE:multi_hand_landmarks" + output_stream: "ITEM:single_hand_landmarks" + output_stream: "BATCH_END:landmark_timestamp" +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:single_hand_landmarks" + output_stream: "RENDER_DATA:single_hand_landmark_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 0 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 5 + landmark_connections: 9 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 9 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 14 + landmark_connections: 15 + landmark_connections: 15 + landmark_connections: 16 + landmark_connections: 13 + landmark_connections: 17 + landmark_connections: 0 + landmark_connections: 17 + landmark_connections: 17 + landmark_connections: 18 + landmark_connections: 18 + landmark_connections: 19 + landmark_connections: 19 + landmark_connections: 20 + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 0 g: 255 b: 0 } + thickness: 4.0 + } + } +} + +# Collects a RenderData object for each hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of RenderData at the BATCH_END +# timestamp. 
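The long landmark_connections list above is consumed pairwise: (0, 1), (1, 2), (2, 3), and so on, each pair being one edge of the 21-point hand skeleton. A short sketch of recovering and using those pairs (the pairing rule is the only thing taken from the node above):

```rust
/// Interpret a flat `landmark_connections` list as (from, to) index pairs,
/// the way the options above are consumed. A trailing unpaired index would
/// simply be ignored here.
fn connection_pairs(flat: &[usize]) -> Vec<(usize, usize)> {
    flat.chunks_exact(2).map(|c| (c[0], c[1])).collect()
}

fn main() {
    // First few entries of the thumb chain from the node above.
    let flat = [0, 1, 1, 2, 2, 3, 3, 4];
    for (a, b) in connection_pairs(&flat) {
        println!("draw line from landmark {} to landmark {}", a, b);
    }
}
```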
+node {
+  calculator: "EndLoopRenderDataCalculator"
+  input_stream: "ITEM:single_hand_landmark_render_data"
+  input_stream: "BATCH_END:landmark_timestamp"
+  output_stream: "ITERABLE:multi_hand_landmarks_render_data"
+}
+
+# Don't render handedness if more than one handedness is reported.
+node {
+  calculator: "ClassificationListVectorHasMinSizeCalculator"
+  input_stream: "ITERABLE:multi_handedness"
+  output_stream: "disallow_handedness_rendering"
+  node_options: {
+    [type.googleapis.com/mediapipe.CollectionHasMinSizeCalculatorOptions] {
+      min_size: 2
+    }
+  }
+}
+
+node {
+  calculator: "GateCalculator"
+  input_stream: "multi_handedness"
+  input_stream: "DISALLOW:disallow_handedness_rendering"
+  output_stream: "allowed_multi_handedness"
+  node_options: {
+    [type.googleapis.com/mediapipe.GateCalculatorOptions] {
+      empty_packets_as_allow: false
+    }
+  }
+}
+
+node {
+  calculator: "SplitClassificationListVectorCalculator"
+  input_stream: "allowed_multi_handedness"
+  output_stream: "handedness"
+  node_options: {
+    [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] {
+      ranges: { begin: 0 end: 1 }
+      element_only: true
+    }
+  }
+}
+
+# Converts classification to drawing primitives for annotation overlay.
+node {
+  calculator: "LabelsToRenderDataCalculator"
+  input_stream: "CLASSIFICATIONS:handedness"
+  output_stream: "RENDER_DATA:handedness_render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.LabelsToRenderDataCalculatorOptions]: {
+      color { r: 255 g: 0 b: 0 }
+      thickness: 10.0
+      font_height_px: 50
+      horizontal_offset_px: 30
+      vertical_offset_px: 50
+
+      max_num_labels: 1
+      location: TOP_LEFT
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images. Consumes
+# a vector of RenderData objects and draws each of them on the input frame.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE:input_image"
+  input_stream: "detection_render_data"
+  input_stream: "multi_hand_rects_render_data"
+  input_stream: "multi_palm_rects_render_data"
+  input_stream: "handedness_render_data"
+  input_stream: "VECTOR:0:multi_hand_landmarks_render_data"
+  output_stream: "IMAGE:output_image"
+}
diff --git a/mediapipe/graphs/hand_tracking/subgraphs/hand_renderer_gpu.pbtxt b/mediapipe/graphs/hand_tracking/subgraphs/hand_renderer_gpu.pbtxt
new file mode 100644
index 0000000..9f0af85
--- /dev/null
+++ b/mediapipe/graphs/hand_tracking/subgraphs/hand_renderer_gpu.pbtxt
@@ -0,0 +1,209 @@
+# MediaPipe graph to render hand landmarks and some related debug information.
+
+type: "HandRendererSubgraph"
+
+# GPU buffer. (GpuBuffer)
+input_stream: "IMAGE:input_image"
+# Collection of detected/predicted hands, each represented as a list of
+# landmarks. (std::vector)
+input_stream: "LANDMARKS:multi_hand_landmarks"
+# Handedness of the detected hand (i.e. is hand left or right).
+# (std::vector)
+input_stream: "HANDEDNESS:multi_handedness"
+# Regions of interest calculated based on palm detections.
+# (std::vector)
+input_stream: "NORM_RECTS:0:multi_palm_rects"
+# Regions of interest calculated based on landmarks.
+# (std::vector)
+input_stream: "NORM_RECTS:1:multi_hand_rects"
+# Detected palms. (std::vector)
+input_stream: "DETECTIONS:palm_detections"
+
+# Updated GPU buffer. (GpuBuffer)
+output_stream: "IMAGE:output_image"
+
+# Converts detections to drawing primitives for annotation overlay.
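Both renderer variants gate the handedness label the same way: the min-size check raises DISALLOW when two or more hands are reported, the gate then drops the packet, and the split keeps only the first ClassificationList. Restated compactly in Rust (types are stand-ins for the MediaPipe protos):

```rust
/// Stand-in for mediapipe::ClassificationList.
#[derive(Debug, Clone)]
struct ClassificationList { label: String }

/// Mirror of the gating nodes: with two or more detected hands the
/// handedness label is suppressed, otherwise the first (only) entry is kept.
fn handedness_to_render(multi_handedness: &[ClassificationList]) -> Option<&ClassificationList> {
    // ClassificationListVectorHasMinSizeCalculator with min_size: 2
    let disallow = multi_handedness.len() >= 2;
    // GateCalculator + SplitClassificationListVectorCalculator (range 0..1)
    if disallow { None } else { multi_handedness.first() }
}

fn main() {
    let one = vec![ClassificationList { label: "Left".into() }];
    let two = vec![
        ClassificationList { label: "Left".into() },
        ClassificationList { label: "Right".into() },
    ];
    println!("{:?}", handedness_to_render(&one)); // Some(Left)
    println!("{:?}", handedness_to_render(&two)); // None
}
```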
+node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:palm_detections" + output_stream: "RENDER_DATA:detection_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECTS:multi_hand_rects" + output_stream: "RENDER_DATA:multi_hand_rects_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECTS:multi_palm_rects" + output_stream: "RENDER_DATA:multi_palm_rects_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 125 g: 0 b: 122 } + thickness: 4.0 + } + } +} + +# Outputs each element of multi_palm_landmarks at a fake timestamp for the rest +# of the graph to process. At the end of the loop, outputs the BATCH_END +# timestamp for downstream calculators to inform them that all elements in the +# vector have been processed. +node { + calculator: "BeginLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITERABLE:multi_hand_landmarks" + output_stream: "ITEM:single_hand_landmarks" + output_stream: "BATCH_END:landmark_timestamp" +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:single_hand_landmarks" + output_stream: "RENDER_DATA:single_hand_landmark_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 0 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 5 + landmark_connections: 9 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 9 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 14 + landmark_connections: 15 + landmark_connections: 15 + landmark_connections: 16 + landmark_connections: 13 + landmark_connections: 17 + landmark_connections: 0 + landmark_connections: 17 + landmark_connections: 17 + landmark_connections: 18 + landmark_connections: 18 + landmark_connections: 19 + landmark_connections: 19 + landmark_connections: 20 + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 0 g: 255 b: 0 } + thickness: 4.0 + } + } +} + +# Collects a RenderData object for each hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of RenderData at the BATCH_END +# timestamp. 
+node {
+  calculator: "EndLoopRenderDataCalculator"
+  input_stream: "ITEM:single_hand_landmark_render_data"
+  input_stream: "BATCH_END:landmark_timestamp"
+  output_stream: "ITERABLE:multi_hand_landmarks_render_data"
+}
+
+# Don't render handedness if more than one handedness is reported.
+node {
+  calculator: "ClassificationListVectorHasMinSizeCalculator"
+  input_stream: "ITERABLE:multi_handedness"
+  output_stream: "disallow_handedness_rendering"
+  node_options: {
+    [type.googleapis.com/mediapipe.CollectionHasMinSizeCalculatorOptions] {
+      min_size: 2
+    }
+  }
+}
+
+node {
+  calculator: "GateCalculator"
+  input_stream: "multi_handedness"
+  input_stream: "DISALLOW:disallow_handedness_rendering"
+  output_stream: "allowed_multi_handedness"
+  node_options: {
+    [type.googleapis.com/mediapipe.GateCalculatorOptions] {
+      empty_packets_as_allow: false
+    }
+  }
+}
+
+node {
+  calculator: "SplitClassificationListVectorCalculator"
+  input_stream: "allowed_multi_handedness"
+  output_stream: "handedness"
+  node_options: {
+    [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] {
+      ranges: { begin: 0 end: 1 }
+      element_only: true
+    }
+  }
+}
+
+# Converts classification to drawing primitives for annotation overlay.
+node {
+  calculator: "LabelsToRenderDataCalculator"
+  input_stream: "CLASSIFICATIONS:handedness"
+  output_stream: "RENDER_DATA:handedness_render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.LabelsToRenderDataCalculatorOptions]: {
+      color { r: 255 g: 0 b: 0 }
+      thickness: 10.0
+      font_height_px: 50
+      horizontal_offset_px: 30
+      vertical_offset_px: 50
+
+      max_num_labels: 1
+      location: TOP_LEFT
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images. Consumes
+# a vector of RenderData objects and draws each of them on the input frame.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE_GPU:input_image"
+  input_stream: "detection_render_data"
+  input_stream: "multi_hand_rects_render_data"
+  input_stream: "multi_palm_rects_render_data"
+  input_stream: "handedness_render_data"
+  input_stream: "VECTOR:0:multi_hand_landmarks_render_data"
+  output_stream: "IMAGE_GPU:output_image"
+}
diff --git a/mediapipe/graphs/holistic_tracking/BUILD b/mediapipe/graphs/holistic_tracking/BUILD
new file mode 100644
index 0000000..986cf9f
--- /dev/null
+++ b/mediapipe/graphs/holistic_tracking/BUILD
@@ -0,0 +1,70 @@
+# Copyright 2020 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+    "//mediapipe/framework/tool:mediapipe_graph.bzl",
+    "mediapipe_binary_graph",
+    "mediapipe_simple_subgraph",
+)
+
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"])
+
+mediapipe_simple_subgraph(
+    name = "holistic_tracking_to_render_data",
+    graph = "holistic_tracking_to_render_data.pbtxt",
+    register_as = "HolisticTrackingToRenderData",
+    deps = [
+        "//mediapipe/calculators/core:concatenate_normalized_landmark_list_calculator",
+        "//mediapipe/calculators/core:concatenate_vector_calculator",
+        "//mediapipe/calculators/core:merge_calculator",
+        "//mediapipe/calculators/core:split_landmarks_calculator",
+        "//mediapipe/calculators/core:split_vector_calculator",
+        "//mediapipe/calculators/util:detections_to_render_data_calculator",
+        "//mediapipe/calculators/util:landmarks_to_render_data_calculator",
+        "//mediapipe/calculators/util:rect_to_render_data_calculator",
+        "//mediapipe/calculators/util:rect_to_render_scale_calculator",
+        "//mediapipe/modules/holistic_landmark:hand_wrist_for_pose",
+    ],
+)
+
+cc_library(
+    name = "holistic_tracking_gpu_deps",
+    deps = [
+        ":holistic_tracking_to_render_data",
+        "//mediapipe/calculators/core:flow_limiter_calculator",
+        "//mediapipe/calculators/image:image_properties_calculator",
+        "//mediapipe/calculators/util:annotation_overlay_calculator",
+        "//mediapipe/modules/holistic_landmark:holistic_landmark_gpu",
+    ],
+)
+
+mediapipe_binary_graph(
+    name = "holistic_tracking_gpu",
+    graph = "holistic_tracking_gpu.pbtxt",
+    output_name = "holistic_tracking_gpu.binarypb",
+    deps = [":holistic_tracking_gpu_deps"],
+)
+
+cc_library(
+    name = "holistic_tracking_cpu_graph_deps",
+    deps = [
+        ":holistic_tracking_to_render_data",
+        "//mediapipe/calculators/core:flow_limiter_calculator",
+        "//mediapipe/calculators/image:image_properties_calculator",
+        "//mediapipe/calculators/util:annotation_overlay_calculator",
+        "//mediapipe/modules/holistic_landmark:holistic_landmark_cpu",
+    ],
+)
diff --git a/mediapipe/graphs/holistic_tracking/holistic_tracking_cpu.pbtxt b/mediapipe/graphs/holistic_tracking/holistic_tracking_cpu.pbtxt
new file mode 100644
index 0000000..fead245
--- /dev/null
+++ b/mediapipe/graphs/holistic_tracking/holistic_tracking_cpu.pbtxt
@@ -0,0 +1,75 @@
+# Tracks and renders pose + hands + face landmarks.
+
+# CPU image. (ImageFrame)
+input_stream: "input_video"
+
+# CPU image with rendered results. (ImageFrame)
+output_stream: "output_video"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that come in while waiting are
+# dropped, limiting the number of in-flight images in most part of the graph to
+# 1. This prevents the downstream nodes from queuing up incoming images and data
+# excessively, which leads to increased latency and memory usage, unwanted in
+# real-time mobile applications. It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+  node_options: {
+    [type.googleapis.com/mediapipe.FlowLimiterCalculatorOptions] {
+      max_in_flight: 1
+      max_in_queue: 1
+      # Timeout is disabled (set to 0) as first frame processing can take more
+      # than 1 second.
+      in_flight_timeout: 0
+    }
+  }
+}
+
+node {
+  calculator: "HolisticLandmarkCpu"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "POSE_LANDMARKS:pose_landmarks"
+  output_stream: "POSE_ROI:pose_roi"
+  output_stream: "POSE_DETECTION:pose_detection"
+  output_stream: "FACE_LANDMARKS:face_landmarks"
+  output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks"
+  output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks"
+}
+
+# Gets image size.
+node {
+  calculator: "ImagePropertiesCalculator"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "SIZE:image_size"
+}
+
+# Converts pose, hands and face landmarks to a render data vector.
+node {
+  calculator: "HolisticTrackingToRenderData"
+  input_stream: "IMAGE_SIZE:image_size"
+  input_stream: "POSE_LANDMARKS:pose_landmarks"
+  input_stream: "POSE_ROI:pose_roi"
+  input_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks"
+  input_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks"
+  input_stream: "FACE_LANDMARKS:face_landmarks"
+  output_stream: "RENDER_DATA_VECTOR:render_data_vector"
+}
+
+# Draws annotations and overlays them on top of the input images.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE:throttled_input_video"
+  input_stream: "VECTOR:render_data_vector"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt b/mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt
new file mode 100644
index 0000000..dc85be4
--- /dev/null
+++ b/mediapipe/graphs/holistic_tracking/holistic_tracking_gpu.pbtxt
@@ -0,0 +1,75 @@
+# Tracks and renders pose + hands + face landmarks.
+
+# GPU buffer. (GpuBuffer)
+input_stream: "input_video"
+
+# GPU image with rendered results. (GpuBuffer)
+output_stream: "output_video"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that come in while waiting are
+# dropped, limiting the number of in-flight images in most part of the graph to
+# 1. This prevents the downstream nodes from queuing up incoming images and data
+# excessively, which leads to increased latency and memory usage, unwanted in
+# real-time mobile applications. It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+  node_options: {
+    [type.googleapis.com/mediapipe.FlowLimiterCalculatorOptions] {
+      max_in_flight: 1
+      max_in_queue: 1
+      # Timeout is disabled (set to 0) as first frame processing can take more
+      # than 1 second.
+ in_flight_timeout: 0 + } + } +} + +node { + calculator: "HolisticLandmarkGpu" + input_stream: "IMAGE:throttled_input_video" + output_stream: "POSE_LANDMARKS:pose_landmarks" + output_stream: "POSE_ROI:pose_roi" + output_stream: "POSE_DETECTION:pose_detection" + output_stream: "FACE_LANDMARKS:face_landmarks" + output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" + output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" +} + +# Gets image size. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:throttled_input_video" + output_stream: "SIZE:image_size" +} + +# Converts pose, hands and face landmarks to a render data vector. +node { + calculator: "HolisticTrackingToRenderData" + input_stream: "IMAGE_SIZE:image_size" + input_stream: "POSE_LANDMARKS:pose_landmarks" + input_stream: "POSE_ROI:pose_roi" + input_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" + input_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" + input_stream: "FACE_LANDMARKS:face_landmarks" + output_stream: "RENDER_DATA_VECTOR:render_data_vector" +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE_GPU:throttled_input_video" + input_stream: "VECTOR:render_data_vector" + output_stream: "IMAGE_GPU:output_video" +} diff --git a/mediapipe/graphs/holistic_tracking/holistic_tracking_to_render_data.pbtxt b/mediapipe/graphs/holistic_tracking/holistic_tracking_to_render_data.pbtxt new file mode 100644 index 0000000..4b05123 --- /dev/null +++ b/mediapipe/graphs/holistic_tracking/holistic_tracking_to_render_data.pbtxt @@ -0,0 +1,757 @@ +# Converts pose + hands + face landmarks to a render data vector. + +type: "HolisticTrackingToRenderData" + +# Image size. (std::pair) +input_stream: "IMAGE_SIZE:image_size" +# Pose landmarks. (NormalizedLandmarkList) +input_stream: "POSE_LANDMARKS:landmarks" +# Region of interest calculated based on pose landmarks. (NormalizedRect) +input_stream: "POSE_ROI:roi" +# Left hand landmarks. (NormalizedLandmarkList) +input_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" +# Right hand landmarks. (NormalizedLandmarkList) +input_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" +# Face landmarks. (NormalizedLandmarkList) +input_stream: "FACE_LANDMARKS:face_landmarks" + +# Render data vector. (std::vector) +output_stream: "RENDER_DATA_VECTOR:render_data_vector" + +# --------------------------------------------------------------------------- # +# ------------------ Calculates scale for render objects -------------------- # +# --------------------------------------------------------------------------- # + +# Calculates rendering scale based on the pose bounding box. +node { + calculator: "RectToRenderScaleCalculator" + input_stream: "NORM_RECT:roi" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "RENDER_SCALE:render_scale" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderScaleCalculatorOptions] { + multiplier: 0.0008 + } + } +} + +# --------------------------------------------------------------------------- # +# --------------- Combines pose and hands into pose skeleton ---------------- # +# --------------------------------------------------------------------------- # + +# Gets pose landmarks before wrists. 
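The RENDER_SCALE stream computed above lets every thickness in this file track the subject's apparent size in frame. Assuming the scale grows roughly linearly with the pose ROI's pixel size (an assumption about RectToRenderScaleCalculator's internals, not something stated in this file), the effect can be pictured as:

```rust
/// Assumed model of RectToRenderScaleCalculator: the render scale grows
/// linearly with the pose ROI size in pixels. This is a guess at the shape
/// of the formula, for illustration; the exact definition lives in the
/// calculator's C++ source.
fn render_scale(roi_width_px: f32, roi_height_px: f32, multiplier: f32) -> f32 {
    multiplier * roi_width_px.max(roi_height_px)
}

fn main() {
    let multiplier = 0.0008; // value configured in the node above
    // A person filling ~900 px of the frame vs. a distant ~250 px person.
    let near = render_scale(500.0, 900.0, multiplier);
    let far = render_scale(140.0, 250.0, multiplier);
    // A base thickness of 3.0 (as used by the pose nodes below) becomes:
    println!("near: {:.2} px, far: {:.2} px", 3.0 * near, 3.0 * far);
}
```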
+node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "landmarks" + output_stream: "landmarks_before_wrist" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 11 end: 15 } + } + } +} + +# Gets pose left wrist landmark. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "landmarks" + output_stream: "landmarks_left_wrist" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 15 end: 16 } + } + } +} + +# Gets pose right wrist landmark. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "landmarks" + output_stream: "landmarks_right_wrist" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 16 end: 17 } + } + } +} + +# Gets pose landmarks after wrists. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "landmarks" + output_stream: "landmarks_after_wrist" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 23 end: 33 } + } + } +} + +# Gets left hand wrist landmark. +node { + calculator: "HandWristForPose" + input_stream: "HAND_LANDMARKS:left_hand_landmarks" + output_stream: "WRIST_LANDMARK:left_hand_wrist_landmark" +} + +# Gets left hand wrist landmark or keep pose wrist landmark if hand was not +# predicted. +node { + calculator: "MergeCalculator" + input_stream: "left_hand_wrist_landmark" + input_stream: "landmarks_left_wrist" + output_stream: "merged_left_hand_wrist_landmark" +} + +# Gets right hand wrist landmark. +node { + calculator: "HandWristForPose" + input_stream: "HAND_LANDMARKS:right_hand_landmarks" + output_stream: "WRIST_LANDMARK:right_hand_wrist_landmark" +} + +# Gets right hand wrist landmark or keep pose wrist landmark if hand was not +# predicted. +node { + calculator: "MergeCalculator" + input_stream: "right_hand_wrist_landmark" + input_stream: "landmarks_right_wrist" + output_stream: "merged_right_hand_wrist_landmark" +} + +# Combines pose landmarks all together. +node { + calculator: "ConcatenateNormalizedLandmarkListCalculator" + input_stream: "landmarks_before_wrist" + input_stream: "merged_left_hand_wrist_landmark" + input_stream: "merged_right_hand_wrist_landmark" + input_stream: "landmarks_after_wrist" + output_stream: "landmarks_merged" + node_options: { + [type.googleapis.com/mediapipe.ConcatenateVectorCalculatorOptions] { + only_emit_if_all_present: true + } + } +} + +# Takes left pose landmarks. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "landmarks_merged" + output_stream: "landmarks_left_side" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 1 } + ranges: { begin: 2 end: 3 } + ranges: { begin: 4 end: 5 } + ranges: { begin: 6 end: 7 } + ranges: { begin: 8 end: 9 } + ranges: { begin: 10 end: 11 } + ranges: { begin: 12 end: 13 } + ranges: { begin: 14 end: 15 } + combine_outputs: true + } + } +} + +# Takes right pose landmarks. 
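The MergeCalculator nodes above prefer the hand model's wrist and fall back to the pose wrist only when no hand was predicted; in Rust terms the merge is just Option::or (the landmark type here is a stand-in):

```rust
#[derive(Debug, Clone, Copy)]
struct Landmark { x: f32, y: f32, z: f32 }

/// MergeCalculator semantics as used above: take the first input that is
/// present. The hand wrist is more precise, so it is listed first, and the
/// pose wrist only fills in when the hand was not predicted.
fn merged_wrist(hand_wrist: Option<Landmark>, pose_wrist: Option<Landmark>) -> Option<Landmark> {
    hand_wrist.or(pose_wrist)
}

fn main() {
    let pose = Some(Landmark { x: 0.31, y: 0.64, z: 0.0 });
    println!("{:?}", merged_wrist(None, pose)); // falls back to the pose wrist
    println!("{:?}", merged_wrist(Some(Landmark { x: 0.30, y: 0.66, z: 0.0 }), pose));
}
```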
+node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "landmarks_merged" + output_stream: "landmarks_right_side" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 1 end: 2 } + ranges: { begin: 3 end: 4 } + ranges: { begin: 5 end: 6 } + ranges: { begin: 7 end: 8 } + ranges: { begin: 9 end: 10 } + ranges: { begin: 11 end: 12 } + ranges: { begin: 13 end: 14 } + ranges: { begin: 15 end: 16 } + combine_outputs: true + } + } +} + +# --------------------------------------------------------------------------- # +# ---------------------------------- Pose ----------------------------------- # +# --------------------------------------------------------------------------- # + +# Converts pose connections to white lines. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks_merged" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 0 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 4 + landmark_connections: 1 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 5 + landmark_connections: 0 + landmark_connections: 6 + landmark_connections: 1 + landmark_connections: 7 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 6 + landmark_connections: 8 + landmark_connections: 7 + landmark_connections: 9 + landmark_connections: 8 + landmark_connections: 10 + landmark_connections: 9 + landmark_connections: 11 + landmark_connections: 10 + landmark_connections: 12 + landmark_connections: 11 + landmark_connections: 13 + landmark_connections: 12 + landmark_connections: 14 + landmark_connections: 13 + landmark_connections: 15 + landmark_connections: 10 + landmark_connections: 14 + landmark_connections: 11 + landmark_connections: 15 + + landmark_color { r: 255 g: 255 b: 255 } + connection_color { r: 255 g: 255 b: 255 } + thickness: 3.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.1 + } + } +} + +# Converts pose joints to big white circles. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks_merged" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_background_joints_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 255 g: 255 b: 255 } + connection_color { r: 255 g: 255 b: 255 } + thickness: 5.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Converts pose left side joints to orange circles (inside white ones). +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks_left_side" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_left_joints_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 255 g: 138 b: 0 } + connection_color { r: 255 g: 138 b: 0 } + thickness: 3.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Converts pose right side joints to cyan circles (inside white ones). 
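In the merged pose list built above, left and right joints alternate, which is why the two split nodes select every other index (even indices for the left side, odd for the right) and recombine them with combine_outputs: true. The same selection in plain Rust:

```rust
/// Reproduce the SplitNormalizedLandmarkListCalculator range selections
/// above: single-element ranges over even indices for the left side and odd
/// indices for the right side, each recombined into one list
/// (combine_outputs: true).
fn split_sides<T: Copy>(merged: &[T]) -> (Vec<T>, Vec<T>) {
    let left = merged.iter().copied().step_by(2).collect();
    let right = merged.iter().copied().skip(1).step_by(2).collect();
    (left, right)
}

fn main() {
    // 16 merged upper-body landmarks -> 8 left-side, 8 right-side.
    let merged: Vec<usize> = (0..16).collect();
    let (left, right) = split_sides(&merged);
    println!("left:  {:?}", left);
    println!("right: {:?}", right);
}
```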
+node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks_right_side" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_right_joints_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 0 g: 217 b: 231 } + connection_color { r: 0 g: 217 b: 231 } + thickness: 3.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# --------------------------------------------------------------------------- # +# ------------------------------- Left hand --------------------------------- # +# --------------------------------------------------------------------------- # + +# Converts left hand connections to white lines. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:left_hand_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:left_hand_landmarks_connections_rd" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 0 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 5 + landmark_connections: 9 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 9 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 14 + landmark_connections: 15 + landmark_connections: 15 + landmark_connections: 16 + landmark_connections: 13 + landmark_connections: 17 + landmark_connections: 0 + landmark_connections: 17 + landmark_connections: 17 + landmark_connections: 18 + landmark_connections: 18 + landmark_connections: 19 + landmark_connections: 19 + landmark_connections: 20 + landmark_color { r: 255 g: 255 b: 255 } + connection_color { r: 255 g: 255 b: 255 } + thickness: 4.0 + visualize_landmark_depth: false + } + } +} + +# Converts left hand color joints. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:left_hand_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:left_hand_landmarks_joints_rd" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 255 g: 138 b: 0 } + connection_color { r: 255 g: 138 b: 0 } + thickness: 3.0 + visualize_landmark_depth: false + } + } +} + +# --------------------------------------------------------------------------- # +# -------------------------------- Right hand ------------------------------- # +# --------------------------------------------------------------------------- # + +# Converts right hand connections to white lines. 
+node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:right_hand_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:right_hand_landmarks_connections_rd" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 0 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 5 + landmark_connections: 9 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 9 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 14 + landmark_connections: 15 + landmark_connections: 15 + landmark_connections: 16 + landmark_connections: 13 + landmark_connections: 17 + landmark_connections: 0 + landmark_connections: 17 + landmark_connections: 17 + landmark_connections: 18 + landmark_connections: 18 + landmark_connections: 19 + landmark_connections: 19 + landmark_connections: 20 + landmark_color { r: 255 g: 255 b: 255 } + connection_color { r: 255 g: 255 b: 255 } + thickness: 4.0 + visualize_landmark_depth: false + } + } +} + +# Converts right hand color joints. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:right_hand_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:right_hand_landmarks_joints_rd" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 0 g: 217 b: 231 } + connection_color { r: 0 g: 217 b: 231 } + thickness: 3.0 + visualize_landmark_depth: false + } + } +} + +# --------------------------------------------------------------------------- # +# ---------------------------------- Face ----------------------------------- # +# --------------------------------------------------------------------------- # + +# Converts face connections to white lines. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:face_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:face_landmarks_connections_rd" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + # Lips. 
+ landmark_connections: 61 + landmark_connections: 146 + landmark_connections: 146 + landmark_connections: 91 + landmark_connections: 91 + landmark_connections: 181 + landmark_connections: 181 + landmark_connections: 84 + landmark_connections: 84 + landmark_connections: 17 + landmark_connections: 17 + landmark_connections: 314 + landmark_connections: 314 + landmark_connections: 405 + landmark_connections: 405 + landmark_connections: 321 + landmark_connections: 321 + landmark_connections: 375 + landmark_connections: 375 + landmark_connections: 291 + landmark_connections: 61 + landmark_connections: 185 + landmark_connections: 185 + landmark_connections: 40 + landmark_connections: 40 + landmark_connections: 39 + landmark_connections: 39 + landmark_connections: 37 + landmark_connections: 37 + landmark_connections: 0 + landmark_connections: 0 + landmark_connections: 267 + landmark_connections: 267 + landmark_connections: 269 + landmark_connections: 269 + landmark_connections: 270 + landmark_connections: 270 + landmark_connections: 409 + landmark_connections: 409 + landmark_connections: 291 + landmark_connections: 78 + landmark_connections: 95 + landmark_connections: 95 + landmark_connections: 88 + landmark_connections: 88 + landmark_connections: 178 + landmark_connections: 178 + landmark_connections: 87 + landmark_connections: 87 + landmark_connections: 14 + landmark_connections: 14 + landmark_connections: 317 + landmark_connections: 317 + landmark_connections: 402 + landmark_connections: 402 + landmark_connections: 318 + landmark_connections: 318 + landmark_connections: 324 + landmark_connections: 324 + landmark_connections: 308 + landmark_connections: 78 + landmark_connections: 191 + landmark_connections: 191 + landmark_connections: 80 + landmark_connections: 80 + landmark_connections: 81 + landmark_connections: 81 + landmark_connections: 82 + landmark_connections: 82 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 312 + landmark_connections: 312 + landmark_connections: 311 + landmark_connections: 311 + landmark_connections: 310 + landmark_connections: 310 + landmark_connections: 415 + landmark_connections: 415 + landmark_connections: 308 + # Left eye. + landmark_connections: 33 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 163 + landmark_connections: 163 + landmark_connections: 144 + landmark_connections: 144 + landmark_connections: 145 + landmark_connections: 145 + landmark_connections: 153 + landmark_connections: 153 + landmark_connections: 154 + landmark_connections: 154 + landmark_connections: 155 + landmark_connections: 155 + landmark_connections: 133 + landmark_connections: 33 + landmark_connections: 246 + landmark_connections: 246 + landmark_connections: 161 + landmark_connections: 161 + landmark_connections: 160 + landmark_connections: 160 + landmark_connections: 159 + landmark_connections: 159 + landmark_connections: 158 + landmark_connections: 158 + landmark_connections: 157 + landmark_connections: 157 + landmark_connections: 173 + landmark_connections: 173 + landmark_connections: 133 + # Left eyebrow. 
+ landmark_connections: 46 + landmark_connections: 53 + landmark_connections: 53 + landmark_connections: 52 + landmark_connections: 52 + landmark_connections: 65 + landmark_connections: 65 + landmark_connections: 55 + landmark_connections: 70 + landmark_connections: 63 + landmark_connections: 63 + landmark_connections: 105 + landmark_connections: 105 + landmark_connections: 66 + landmark_connections: 66 + landmark_connections: 107 + # Right eye. + landmark_connections: 263 + landmark_connections: 249 + landmark_connections: 249 + landmark_connections: 390 + landmark_connections: 390 + landmark_connections: 373 + landmark_connections: 373 + landmark_connections: 374 + landmark_connections: 374 + landmark_connections: 380 + landmark_connections: 380 + landmark_connections: 381 + landmark_connections: 381 + landmark_connections: 382 + landmark_connections: 382 + landmark_connections: 362 + landmark_connections: 263 + landmark_connections: 466 + landmark_connections: 466 + landmark_connections: 388 + landmark_connections: 388 + landmark_connections: 387 + landmark_connections: 387 + landmark_connections: 386 + landmark_connections: 386 + landmark_connections: 385 + landmark_connections: 385 + landmark_connections: 384 + landmark_connections: 384 + landmark_connections: 398 + landmark_connections: 398 + landmark_connections: 362 + # Right eyebrow. + landmark_connections: 276 + landmark_connections: 283 + landmark_connections: 283 + landmark_connections: 282 + landmark_connections: 282 + landmark_connections: 295 + landmark_connections: 295 + landmark_connections: 285 + landmark_connections: 300 + landmark_connections: 293 + landmark_connections: 293 + landmark_connections: 334 + landmark_connections: 334 + landmark_connections: 296 + landmark_connections: 296 + landmark_connections: 336 + # Face oval. 
+ landmark_connections: 10 + landmark_connections: 338 + landmark_connections: 338 + landmark_connections: 297 + landmark_connections: 297 + landmark_connections: 332 + landmark_connections: 332 + landmark_connections: 284 + landmark_connections: 284 + landmark_connections: 251 + landmark_connections: 251 + landmark_connections: 389 + landmark_connections: 389 + landmark_connections: 356 + landmark_connections: 356 + landmark_connections: 454 + landmark_connections: 454 + landmark_connections: 323 + landmark_connections: 323 + landmark_connections: 361 + landmark_connections: 361 + landmark_connections: 288 + landmark_connections: 288 + landmark_connections: 397 + landmark_connections: 397 + landmark_connections: 365 + landmark_connections: 365 + landmark_connections: 379 + landmark_connections: 379 + landmark_connections: 378 + landmark_connections: 378 + landmark_connections: 400 + landmark_connections: 400 + landmark_connections: 377 + landmark_connections: 377 + landmark_connections: 152 + landmark_connections: 152 + landmark_connections: 148 + landmark_connections: 148 + landmark_connections: 176 + landmark_connections: 176 + landmark_connections: 149 + landmark_connections: 149 + landmark_connections: 150 + landmark_connections: 150 + landmark_connections: 136 + landmark_connections: 136 + landmark_connections: 172 + landmark_connections: 172 + landmark_connections: 58 + landmark_connections: 58 + landmark_connections: 132 + landmark_connections: 132 + landmark_connections: 93 + landmark_connections: 93 + landmark_connections: 234 + landmark_connections: 234 + landmark_connections: 127 + landmark_connections: 127 + landmark_connections: 162 + landmark_connections: 162 + landmark_connections: 21 + landmark_connections: 21 + landmark_connections: 54 + landmark_connections: 54 + landmark_connections: 103 + landmark_connections: 103 + landmark_connections: 67 + landmark_connections: 67 + landmark_connections: 109 + landmark_connections: 109 + landmark_connections: 10 + landmark_color { r: 255 g: 255 b: 255 } + connection_color { r: 255 g: 255 b: 255 } + thickness: 0.5 + visualize_landmark_depth: false + } + } +} + +# Converts face joints to cyan circles. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:face_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:face_landmarks_joints_rd" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 0 g: 217 b: 231 } + connection_color { r: 0 g: 217 b: 231 } + thickness: 0.5 + visualize_landmark_depth: false + } + } +} + +# Concatenates all render data. +node { + calculator: "ConcatenateRenderDataVectorCalculator" + input_stream: "landmarks_render_data" + input_stream: "landmarks_background_joints_render_data" + input_stream: "landmarks_left_joints_render_data" + input_stream: "landmarks_right_joints_render_data" + + # Left hand. + input_stream: "left_hand_landmarks_connections_rd" + input_stream: "left_hand_landmarks_joints_rd" + + # Right hand. + input_stream: "right_hand_landmarks_connections_rd" + input_stream: "right_hand_landmarks_joints_rd" + + # Face. 
+ input_stream: "face_landmarks_connections_rd" + input_stream: "face_landmarks_joints_rd" + + output_stream: "render_data_vector" +} diff --git a/mediapipe/graphs/instant_motion_tracking/BUILD b/mediapipe/graphs/instant_motion_tracking/BUILD new file mode 100644 index 0000000..e9be587 --- /dev/null +++ b/mediapipe/graphs/instant_motion_tracking/BUILD @@ -0,0 +1,39 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "instant_motion_tracking_deps", + deps = [ + "//mediapipe/graphs/instant_motion_tracking/calculators:matrices_manager_calculator", + "//mediapipe/graphs/instant_motion_tracking/calculators:sticker_manager_calculator", + "//mediapipe/graphs/instant_motion_tracking/subgraphs:region_tracking", + "//mediapipe/graphs/object_detection_3d/calculators:gl_animation_overlay_calculator", + ], +) + +mediapipe_binary_graph( + name = "instant_motion_tracking_binary_graph", + graph = "instant_motion_tracking.pbtxt", + output_name = "instant_motion_tracking.binarypb", + deps = [":instant_motion_tracking_deps"], +) diff --git a/mediapipe/graphs/instant_motion_tracking/calculators/BUILD b/mediapipe/graphs/instant_motion_tracking/calculators/BUILD new file mode 100644 index 0000000..93af68c --- /dev/null +++ b/mediapipe/graphs/instant_motion_tracking/calculators/BUILD @@ -0,0 +1,84 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
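+
+# Calculators used by the instant_motion_tracking graph: sticker proto
+# parsing, tracked-anchor management, and model matrix generation.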
+ +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library") + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +proto_library( + name = "sticker_buffer_proto", + srcs = [ + "sticker_buffer.proto", + ], +) + +mediapipe_cc_proto_library( + name = "sticker_buffer_cc_proto", + srcs = [ + "sticker_buffer.proto", + ], + visibility = ["//visibility:public"], + deps = [ + ":sticker_buffer_proto", + ], +) + +cc_library( + name = "sticker_manager_calculator", + srcs = ["sticker_manager_calculator.cc"], + hdrs = ["transformations.h"], + deps = [ + ":sticker_buffer_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:timestamp", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + ], + alwayslink = 1, +) + +cc_library( + name = "matrices_manager_calculator", + srcs = ["matrices_manager_calculator.cc"], + hdrs = ["transformations.h"], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:timestamp", + "//mediapipe/framework/formats:image_frame", + "//mediapipe/framework/port:opencv_imgproc", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/graphs/object_detection_3d/calculators:model_matrix_cc_proto", + "//mediapipe/modules/objectron/calculators:box", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/strings", + "@eigen_archive//:eigen3", + ], + alwayslink = 1, +) + +cc_library( + name = "tracked_anchor_manager_calculator", + srcs = ["tracked_anchor_manager_calculator.cc"], + hdrs = ["transformations.h"], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/util/tracking:box_tracker_cc_proto", + ], + alwayslink = 1, +) diff --git a/mediapipe/graphs/instant_motion_tracking/calculators/matrices_manager_calculator.cc b/mediapipe/graphs/instant_motion_tracking/calculators/matrices_manager_calculator.cc new file mode 100644 index 0000000..c003135 --- /dev/null +++ b/mediapipe/graphs/instant_motion_tracking/calculators/matrices_manager_calculator.cc @@ -0,0 +1,393 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+#include <cmath>
+#include <memory>
+
+#include "Eigen/Core"
+#include "Eigen/Dense"
+#include "Eigen/Geometry"
+#include "absl/memory/memory.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_join.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/graphs/instant_motion_tracking/calculators/transformations.h"
+#include "mediapipe/graphs/object_detection_3d/calculators/model_matrix.pb.h"
+#include "mediapipe/modules/objectron/calculators/box.h"
+
+namespace mediapipe {
+
+namespace {
+using Matrix4fCM = Eigen::Matrix<float, 4, 4, Eigen::ColMajor>;
+using Vector3f = Eigen::Vector3f;
+using Matrix3f = Eigen::Matrix3f;
+using DiagonalMatrix3f = Eigen::DiagonalMatrix<float, 3>;
+constexpr char kAnchorsTag[] = "ANCHORS";
+constexpr char kIMUMatrixTag[] = "IMU_ROTATION";
+constexpr char kUserRotationsTag[] = "USER_ROTATIONS";
+constexpr char kUserScalingsTag[] = "USER_SCALINGS";
+constexpr char kRendersTag[] = "RENDER_DATA";
+constexpr char kGifAspectRatioTag[] = "GIF_ASPECT_RATIO";
+constexpr char kFOVSidePacketTag[] = "FOV";
+constexpr char kAspectRatioSidePacketTag[] = "ASPECT_RATIO";
+// Initial Z value (-10 is the center point in visual range for OpenGL render)
+constexpr float kInitialZ = -10.0f;
+}  // namespace
+
+// Intermediary for rotation and translation data to model matrix usable by
+// gl_animation_overlay_calculator. For information on the construction of
+// OpenGL objects and transformations (including a breakdown of model
+// matrices), please visit: https://open.gl/transformations
+//
+// Input Side Packets:
+//  FOV - Vertical field of view for device [REQUIRED - defines perspective
+//  matrix]
+//  ASPECT_RATIO - Aspect ratio of device [REQUIRED - defines perspective
+//  matrix]
+//
+// Input streams:
+//  ANCHORS - Anchor data with x,y,z coordinates (x,y are in the [0.0-1.0]
+//  range for position on the device screen, while z is the scaling factor
+//  that changes in proportion to the distance from the tracked region)
+//  [REQUIRED]
+//  IMU_ROTATION - float[9] of row-major device rotation matrix [REQUIRED]
+//  USER_ROTATIONS - UserRotations with corresponding radians of rotation
+//  [REQUIRED]
+//  USER_SCALINGS - UserScalings with corresponding scale factor [REQUIRED]
+//  RENDER_DATA - Descriptors of which objects/animations to render for
+//  stickers [REQUIRED]
+//  GIF_ASPECT_RATIO - Aspect ratio of GIF image used to dynamically scale
+//  the GIF asset, defined as width / height [OPTIONAL]
+// Output:
+//  MATRICES - TimedModelMatrixProtoList of each object type to render
+//  [REQUIRED]
+//
+// Example config:
+// node {
+//   calculator: "MatricesManagerCalculator"
+//   input_stream: "ANCHORS:tracked_scaled_anchor_data"
+//   input_stream: "IMU_ROTATION:imu_rotation_matrix"
+//   input_stream: "USER_ROTATIONS:user_rotation_data"
+//   input_stream: "USER_SCALINGS:user_scaling_data"
+//   input_stream: "RENDER_DATA:sticker_render_data"
+//   input_stream: "GIF_ASPECT_RATIO:gif_aspect_ratio"
+//   output_stream: "MATRICES:0:first_render_matrices"
+//   output_stream: "MATRICES:1:second_render_matrices" [unbounded input size]
+//   input_side_packet: "FOV:vertical_fov_radians"
+//   input_side_packet: "ASPECT_RATIO:aspect_ratio"
+// }
+
+class MatricesManagerCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  // Device properties that will be preset by side packets
+  float vertical_fov_radians_ = 0.0f;
+  float aspect_ratio_ = 0.0f;
+  float gif_aspect_ratio_ = 1.0f;
+
+  const Matrix3f GenerateUserRotationMatrix(
+      const float rotation_radians) const;
+  const Matrix4fCM GenerateEigenModelMatrix(
+      const Vector3f& translation_vector,
+      const Matrix3f& rotation_submatrix) const;
+  const Vector3f GenerateAnchorVector(const Anchor& tracked_anchor) const;
+  DiagonalMatrix3f GetDefaultRenderScaleDiagonal(
+      const int render_id, const float user_scale_factor,
+      const float gif_aspect_ratio) const;
+
+  // Returns the user scaling increment associated with the sticker_id
+  // TODO: Adjust lookup function if total number of stickers is uncapped to
+  // improve performance
+  const float GetUserScaler(const std::vector<UserScaling>& scalings,
+                            const int sticker_id) const {
+    for (const UserScaling& user_scaling : scalings) {
+      if (user_scaling.sticker_id == sticker_id) {
+        return user_scaling.scale_factor;
+      }
+    }
+    LOG(WARNING) << "Cannot find sticker_id: " << sticker_id
+                 << ", returning 1.0f scaling";
+    return 1.0f;
+  }
+
+  // Returns the user rotation in radians associated with the sticker_id
+  const float GetUserRotation(const std::vector<UserRotation>& rotations,
+                              const int sticker_id) {
+    for (const UserRotation& rotation : rotations) {
+      if (rotation.sticker_id == sticker_id) {
+        return rotation.rotation_radians;
+      }
+    }
+    LOG(WARNING) << "Cannot find sticker_id: " << sticker_id
+                 << ", returning 0.0f rotation";
+    return 0.0f;
+  }
+};
+
+REGISTER_CALCULATOR(MatricesManagerCalculator);
+
+absl::Status MatricesManagerCalculator::GetContract(CalculatorContract* cc) {
+  RET_CHECK(cc->Inputs().HasTag(kAnchorsTag) &&
+            cc->Inputs().HasTag(kIMUMatrixTag) &&
+            cc->Inputs().HasTag(kUserRotationsTag) &&
+            cc->Inputs().HasTag(kUserScalingsTag) &&
+            cc->InputSidePackets().HasTag(kFOVSidePacketTag) &&
+            cc->InputSidePackets().HasTag(kAspectRatioSidePacketTag));
+
+  cc->Inputs().Tag(kAnchorsTag).Set<std::vector<Anchor>>();
+  cc->Inputs().Tag(kIMUMatrixTag).Set<float[]>();
+  cc->Inputs().Tag(kUserScalingsTag).Set<std::vector<UserScaling>>();
+  cc->Inputs().Tag(kUserRotationsTag).Set<std::vector<UserRotation>>();
+  cc->Inputs().Tag(kRendersTag).Set<std::vector<int>>();
+  if (cc->Inputs().HasTag(kGifAspectRatioTag)) {
+    cc->Inputs().Tag(kGifAspectRatioTag).Set<float>();
+  }
+
+  for (CollectionItemId id = cc->Outputs().BeginId("MATRICES");
+       id < cc->Outputs().EndId("MATRICES"); ++id) {
+    cc->Outputs().Get(id).Set<mediapipe::TimedModelMatrixProtoList>();
+  }
+  cc->InputSidePackets().Tag(kFOVSidePacketTag).Set<float>();
+  cc->InputSidePackets().Tag(kAspectRatioSidePacketTag).Set<float>();
+
+  return absl::OkStatus();
+}
+
+absl::Status MatricesManagerCalculator::Open(CalculatorContext* cc) {
+  cc->SetOffset(TimestampDiff(0));
+  // Set device properties from side packets
+  vertical_fov_radians_ =
+      cc->InputSidePackets().Tag(kFOVSidePacketTag).Get<float>();
+  aspect_ratio_ =
+      cc->InputSidePackets().Tag(kAspectRatioSidePacketTag).Get<float>();
+  return absl::OkStatus();
+}
+
+absl::Status MatricesManagerCalculator::Process(CalculatorContext* cc) {
+  // Define each object's model matrices
+  auto asset_matrices_gif =
+      std::make_unique<mediapipe::TimedModelMatrixProtoList>();
+  auto asset_matrices_1 =
+      std::make_unique<mediapipe::TimedModelMatrixProtoList>();
+  // Clear all model matrices
+  asset_matrices_gif->clear_model_matrix();
+  asset_matrices_1->clear_model_matrix();
+
+  const std::vector<UserRotation> user_rotation_data =
+      cc->Inputs().Tag(kUserRotationsTag).Get<std::vector<UserRotation>>();
+
+  const std::vector<UserScaling> user_scaling_data =
+      cc->Inputs().Tag(kUserScalingsTag).Get<std::vector<UserScaling>>();
+
+  const std::vector<int> render_data =
+      cc->Inputs().Tag(kRendersTag).Get<std::vector<int>>();
+
+  const std::vector<Anchor> anchor_data =
+      cc->Inputs().Tag(kAnchorsTag).Get<std::vector<Anchor>>();
+  if (cc->Inputs().HasTag(kGifAspectRatioTag) &&
+      !cc->Inputs().Tag(kGifAspectRatioTag).IsEmpty()) {
+    gif_aspect_ratio_ = cc->Inputs().Tag(kGifAspectRatioTag).Get<float>();
+  }
+
+  // Device IMU rotation submatrix
+  const auto imu_matrix = cc->Inputs().Tag(kIMUMatrixTag).Get<float[]>();
+  Matrix3f imu_rotation_submatrix;
+  int idx = 0;
+  for (int x = 0; x < 3; ++x) {
+    for (int y = 0; y < 3; ++y) {
+      // The input matrix is row-major; it must be reformatted to
+      // column-major via a transpose procedure
+      imu_rotation_submatrix(y, x) = imu_matrix[idx++];
+    }
+  }
+
+  int render_idx = 0;
+  for (const Anchor& anchor : anchor_data) {
+    const int id = anchor.sticker_id;
+    mediapipe::TimedModelMatrixProto* model_matrix;
+    // Add model matrix to the matrices list for the defined object render ID
+    if (render_data[render_idx] == 0) {  // GIF
+      model_matrix = asset_matrices_gif->add_model_matrix();
+    } else {  // Asset 3D
+      if (render_data[render_idx] != 1) {
+        LOG(ERROR) << "render id: " << render_data[render_idx]
+                   << " is not supported. Fall back to using render_id = 1.";
+      }
+      model_matrix = asset_matrices_1->add_model_matrix();
+    }
+
+    model_matrix->set_id(id);
+
+    // The user transformation data associated with this sticker must be
+    // defined
+    const float user_rotation_radians = GetUserRotation(user_rotation_data, id);
+    const float user_scale_factor = GetUserScaler(user_scaling_data, id);
+
+    // A matrix representative of the user's sticker rotation transformation
+    // can be created
+    const Matrix3f user_rotation_submatrix =
+        GenerateUserRotationMatrix(user_rotation_radians);
+    // Next, the diagonal representative of the combined scaling data
+    const DiagonalMatrix3f scaling_diagonal = GetDefaultRenderScaleDiagonal(
+        render_data[render_idx], user_scale_factor, gif_aspect_ratio_);
+    // Increment to the next render id from the vector
+    render_idx++;
+
+    // The user transformation data can be concatenated into a final rotation
+    // submatrix with the device IMU rotational data
+    const Matrix3f user_transformed_rotation_submatrix =
+        imu_rotation_submatrix * user_rotation_submatrix * scaling_diagonal;
+
+    // A vector representative of the translation of the object in OpenGL
+    // coordinate space must be generated
+    const Vector3f translation_vector = GenerateAnchorVector(anchor);
+
+    // Concatenate all model matrix data
+    const Matrix4fCM final_model_matrix = GenerateEigenModelMatrix(
+        translation_vector, user_transformed_rotation_submatrix);
+
+    // The generated model matrix must be mapped to TimedModelMatrixProto
+    // (col-wise)
+    for (int x = 0; x < final_model_matrix.rows(); ++x) {
+      for (int y = 0; y < final_model_matrix.cols(); ++y) {
+        model_matrix->add_matrix_entries(final_model_matrix(x, y));
+      }
+    }
+  }
+
+  // Output all individual render matrices
+  // TODO: Perform depth ordering with gl_animation_overlay_calculator to
+  // render objects in order by depth to allow occlusion.
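+  // (Illustrative: each TimedModelMatrixProtoList packet emitted below holds
+  // one model_matrix entry per sticker of that render type, and each entry
+  // carries the sticker id plus the 16 floats of its 4x4 model matrix.)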
+  cc->Outputs()
+      .Get(cc->Outputs().GetId("MATRICES", 0))
+      .Add(asset_matrices_gif.release(), cc->InputTimestamp());
+  cc->Outputs()
+      .Get(cc->Outputs().GetId("MATRICES", 1))
+      .Add(asset_matrices_1.release(), cc->InputTimestamp());
+
+  return absl::OkStatus();
+}
+
+// Using a specified rotation value in radians, generate a rotation matrix for
+// use with the base rotation submatrix
+const Matrix3f MatricesManagerCalculator::GenerateUserRotationMatrix(
+    const float rotation_radians) const {
+  Eigen::Matrix3f user_rotation_submatrix;
+  user_rotation_submatrix =
+      // The rotation in radians must be inverted to rotate the object
+      // with the direction of finger movement from the user (system dependent)
+      Eigen::AngleAxisf(-rotation_radians, Eigen::Vector3f::UnitY()) *
+      Eigen::AngleAxisf(0.0f, Eigen::Vector3f::UnitZ()) *
+      // Model orientations all assume the z-axis is up, but we need the y-axis
+      // upwards; therefore, a +(M_PI * 0.5f) transformation must be applied
+      // TODO: Bring default rotations, translations, and scalings into
+      // independent sticker configuration
+      Eigen::AngleAxisf(M_PI * 0.5f, Eigen::Vector3f::UnitX());
+  // Matrix must be transposed due to the method of submatrix generation in
+  // Eigen
+  return user_rotation_submatrix.transpose();
+}
+
+// TODO: Investigate possible differences in warping of tracking speed across
+// the screen.
+// Using the sticker anchor data, a translation vector can be generated in
+// OpenGL coordinate space
+const Vector3f MatricesManagerCalculator::GenerateAnchorVector(
+    const Anchor& tracked_anchor) const {
+  // Using an initial z-value in OpenGL space, generate a new base z-axis value
+  // to mimic scaling by distance.
+  const float z = kInitialZ * tracked_anchor.z;
+
+  // Using triangle geometry, the minimum for a y-coordinate that will appear
+  // in the view field for the given z value above can be found.
+  const float y_half_range = z * (tan(vertical_fov_radians_ * 0.5f));
+
+  // The aspect ratio of the device and the y_minimum calculated above can be
+  // used to find the minimum value for x that will appear in the view field
+  // of the device screen.
+  const float x_half_range = y_half_range * aspect_ratio_;
+
+  // Given the minimum bounds of the screen in OpenGL space, the tracked anchor
+  // coordinates can be converted to OpenGL coordinate space.
+ // + // (i.e: X and Y will be converted from [0.0-1.0] space to [x_minimum, + // -x_minimum] space and [y_minimum, -y_minimum] space respectively) + const float x = (-2.0f * tracked_anchor.x * x_half_range) + x_half_range; + const float y = (-2.0f * tracked_anchor.y * y_half_range) + y_half_range; + + // A translation transformation vector can be generated via Eigen + const Vector3f t_vector(x, y, z); + return t_vector; +} + +// Generates a model matrix via Eigen with appropriate transformations +const Matrix4fCM MatricesManagerCalculator::GenerateEigenModelMatrix( + const Vector3f& translation_vector, + const Matrix3f& rotation_submatrix) const { + // Define basic empty model matrix + Matrix4fCM mvp_matrix; + + // Set the translation vector + mvp_matrix.topRightCorner<3, 1>() = translation_vector; + + // Set the rotation submatrix + mvp_matrix.topLeftCorner<3, 3>() = rotation_submatrix; + + // Set trailing 1.0 required by OpenGL to define coordinate space + mvp_matrix(3, 3) = 1.0f; + + return mvp_matrix; +} + +// This returns a scaling matrix to alter the projection matrix for +// the specified render id in order to ensure all objects render at a similar +// size in the view screen upon initial placement +DiagonalMatrix3f MatricesManagerCalculator::GetDefaultRenderScaleDiagonal( + const int render_id, const float user_scale_factor, + const float gif_aspect_ratio) const { + float scale_preset = 1.0f; + float x_scalar = 1.0f; + float y_scalar = 1.0f; + + switch (render_id) { + case 0: { // GIF + // 160 is the scaling preset to make the GIF asset appear relatively + // similar in size to all other assets + scale_preset = 160.0f; + if (gif_aspect_ratio >= 1.0f) { + // GIF is wider horizontally (scale on x-axis) + x_scalar = gif_aspect_ratio; + } else { + // GIF is wider vertically (scale on y-axis) + y_scalar = 1.0f / gif_aspect_ratio; + } + break; + } + case 1: { // 3D asset + // 5 is the scaling preset to make the 3D asset appear relatively + // similar in size to all other assets + scale_preset = 5.0f; + break; + } + default: { + LOG(INFO) << "Unsupported render_id: " << render_id + << ", returning default render_scale"; + break; + } + } + + DiagonalMatrix3f scaling(scale_preset * user_scale_factor * x_scalar, + scale_preset * user_scale_factor * y_scalar, + scale_preset * user_scale_factor); + return scaling; +} +} // namespace mediapipe diff --git a/mediapipe/graphs/instant_motion_tracking/calculators/sticker_buffer.proto b/mediapipe/graphs/instant_motion_tracking/calculators/sticker_buffer.proto new file mode 100644 index 0000000..b73209c --- /dev/null +++ b/mediapipe/graphs/instant_motion_tracking/calculators/sticker_buffer.proto @@ -0,0 +1,33 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
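+
+// Sticker data serialized by the application layer and parsed by
+// StickerManagerCalculator (see sticker_manager_calculator.cc).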
+
+syntax = "proto2";
+
+package mediapipe;
+
+option java_package = "com.google.mediapipe.graphs.instantmotiontracking";
+option java_outer_classname = "StickerBufferProto";
+
+message Sticker {
+  optional int32 id = 1;
+  optional float x = 2;
+  optional float y = 3;
+  optional float rotation = 4;
+  optional float scale = 5;
+  optional int32 render_id = 6;
+}
+
+message StickerRoll {
+  repeated Sticker sticker = 1;
+}
diff --git a/mediapipe/graphs/instant_motion_tracking/calculators/sticker_manager_calculator.cc b/mediapipe/graphs/instant_motion_tracking/calculators/sticker_manager_calculator.cc
new file mode 100644
index 0000000..40210c2
--- /dev/null
+++ b/mediapipe/graphs/instant_motion_tracking/calculators/sticker_manager_calculator.cc
@@ -0,0 +1,150 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <vector>
+
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/graphs/instant_motion_tracking/calculators/sticker_buffer.pb.h"
+#include "mediapipe/graphs/instant_motion_tracking/calculators/transformations.h"
+
+namespace mediapipe {
+
+constexpr char kProtoDataString[] = "PROTO";
+constexpr char kAnchorsTag[] = "ANCHORS";
+constexpr char kUserRotationsTag[] = "USER_ROTATIONS";
+constexpr char kUserScalingsTag[] = "USER_SCALINGS";
+constexpr char kRenderDescriptorsTag[] = "RENDER_DATA";
+
+// This calculator takes in the sticker protobuffer data and parses each
+// individual sticker object into anchors, user rotations and scalings, in
+// addition to basic render data represented in integer form.
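+//
+// For illustration, a StickerRoll (sticker_buffer.proto) carrying two
+// stickers could look like this in proto text format (hypothetical values):
+//   sticker { id: 1 x: 0.3 y: 0.5 rotation: 0.0 scale: 1.0 render_id: 0 }
+//   sticker { id: 2 x: 0.7 y: 0.5 rotation: 1.57 scale: 0.8 render_id: 1 }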
+//
+// Input:
+//  PROTO - String of sticker data in appropriate protobuf format [REQUIRED]
+// Output:
+//  ANCHORS - Anchors with initial normalized X,Y coordinates [REQUIRED]
+//  USER_ROTATIONS - UserRotations with radians of rotation from user
+//  [REQUIRED]
+//  USER_SCALINGS - UserScalings with increment of scaling from user [REQUIRED]
+//  RENDER_DATA - Descriptors of which objects/animations to render for
+//  stickers [REQUIRED]
+//
+// Example config:
+// node {
+//   calculator: "StickerManagerCalculator"
+//   input_stream: "PROTO:sticker_proto_string"
+//   output_stream: "ANCHORS:initial_anchor_data"
+//   output_stream: "USER_ROTATIONS:user_rotation_data"
+//   output_stream: "USER_SCALINGS:user_scaling_data"
+//   output_stream: "RENDER_DATA:sticker_render_data"
+// }
+
+class StickerManagerCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc) {
+    RET_CHECK(cc->Inputs().HasTag(kProtoDataString));
+    RET_CHECK(cc->Outputs().HasTag(kAnchorsTag) &&
+              cc->Outputs().HasTag(kUserRotationsTag) &&
+              cc->Outputs().HasTag(kUserScalingsTag) &&
+              cc->Outputs().HasTag(kRenderDescriptorsTag));
+
+    cc->Inputs().Tag(kProtoDataString).Set<std::string>();
+    cc->Outputs().Tag(kAnchorsTag).Set<std::vector<Anchor>>();
+    cc->Outputs().Tag(kUserRotationsTag).Set<std::vector<UserRotation>>();
+    cc->Outputs().Tag(kUserScalingsTag).Set<std::vector<UserScaling>>();
+    cc->Outputs().Tag(kRenderDescriptorsTag).Set<std::vector<int>>();
+
+    return absl::OkStatus();
+  }
+
+  absl::Status Open(CalculatorContext* cc) override {
+    cc->SetOffset(TimestampDiff(0));
+    return absl::OkStatus();
+  }
+
+  absl::Status Process(CalculatorContext* cc) override {
+    std::string sticker_proto_string =
+        cc->Inputs().Tag(kProtoDataString).Get<std::string>();
+
+    std::vector<Anchor> initial_anchor_data;
+    std::vector<UserRotation> user_rotation_data;
+    std::vector<UserScaling> user_scaling_data;
+    std::vector<int> render_data;
+
+    ::mediapipe::StickerRoll sticker_roll;
+    bool parse_success = sticker_roll.ParseFromString(sticker_proto_string);
+
+    // Ensure parsing was a success
+    RET_CHECK(parse_success) << "Error parsing sticker protobuf data";
+
+    for (int i = 0; i < sticker_roll.sticker().size(); ++i) {
+      // Declare empty structures for sticker data
+      Anchor initial_anchor;
+      UserRotation user_rotation;
+      UserScaling user_scaling;
+      // Get the individual Sticker object as defined by the protobuf
+      ::mediapipe::Sticker sticker = sticker_roll.sticker(i);
+      // Set individual data structure ids to associate with this sticker
+      initial_anchor.sticker_id = sticker.id();
+      user_rotation.sticker_id = sticker.id();
+      user_scaling.sticker_id = sticker.id();
+      initial_anchor.x = sticker.x();
+      initial_anchor.y = sticker.y();
+      initial_anchor.z = 1.0f;  // default to 1.0 in normalized 3d space
+      user_rotation.rotation_radians = sticker.rotation();
+      user_scaling.scale_factor = sticker.scale();
+      const int render_id = sticker.render_id();
+      // Set all vector data with sticker attributes
+      initial_anchor_data.emplace_back(initial_anchor);
+      user_rotation_data.emplace_back(user_rotation);
+      user_scaling_data.emplace_back(user_scaling);
+      render_data.emplace_back(render_id);
+    }
+
+    if (cc->Outputs().HasTag(kAnchorsTag)) {
+      cc->Outputs()
+          .Tag(kAnchorsTag)
+          .AddPacket(MakePacket<std::vector<Anchor>>(initial_anchor_data)
+                         .At(cc->InputTimestamp()));
+    }
+    if (cc->Outputs().HasTag(kUserRotationsTag)) {
+      cc->Outputs()
+          .Tag(kUserRotationsTag)
+          .AddPacket(MakePacket<std::vector<UserRotation>>(user_rotation_data)
+                         .At(cc->InputTimestamp()));
+    }
+    if (cc->Outputs().HasTag(kUserScalingsTag)) {
+      cc->Outputs()
+          .Tag(kUserScalingsTag)
+          .AddPacket(MakePacket<std::vector<UserScaling>>(user_scaling_data)
+                         .At(cc->InputTimestamp()));
+    }
+    if (cc->Outputs().HasTag(kRenderDescriptorsTag)) {
+      cc->Outputs()
+          .Tag(kRenderDescriptorsTag)
+          .AddPacket(MakePacket<std::vector<int>>(render_data)
+                         .At(cc->InputTimestamp()));
+    }
+
+    return absl::OkStatus();
+  }
+
+  absl::Status Close(CalculatorContext* cc) override {
+    return absl::OkStatus();
+  }
+};
+
+REGISTER_CALCULATOR(StickerManagerCalculator);
+}  // namespace mediapipe
diff --git a/mediapipe/graphs/instant_motion_tracking/calculators/tracked_anchor_manager_calculator.cc b/mediapipe/graphs/instant_motion_tracking/calculators/tracked_anchor_manager_calculator.cc
new file mode 100644
index 0000000..446aee7
--- /dev/null
+++ b/mediapipe/graphs/instant_motion_tracking/calculators/tracked_anchor_manager_calculator.cc
@@ -0,0 +1,210 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/graphs/instant_motion_tracking/calculators/transformations.h"
+#include "mediapipe/util/tracking/box_tracker.pb.h"
+
+namespace mediapipe {
+
+constexpr char kSentinelTag[] = "SENTINEL";
+constexpr char kAnchorsTag[] = "ANCHORS";
+constexpr char kBoxesInputTag[] = "BOXES";
+constexpr char kBoxesOutputTag[] = "START_POS";
+constexpr char kCancelTag[] = "CANCEL_ID";
+// TODO: Find optimal Height/Width (0.1-0.3)
+constexpr float kBoxEdgeSize =
+    0.2f;  // Used to establish tracking box dimensions
+constexpr float kUsToMs =
+    1000.0f;  // Used to convert from microseconds to milliseconds
+
+// This calculator manages the regions being tracked for each individual
+// sticker and adjusts the regions being tracked if a change is detected in a
+// sticker's initial anchor placement.
+// Regions being tracked that have no associated sticker will be automatically
+// removed upon the next iteration of the graph to optimize performance and
+// remove all sticker artifacts.
+//
+// Input:
+//  SENTINEL - ID of the sticker whose anchor must be reset (-1 when no
+//  anchor must be reset) [REQUIRED]
+//  ANCHORS - Initial anchor data (tracks changes and where to re/position)
+//  [REQUIRED]
+//  BOXES - Used in cycle; boxes being tracked, meant to update positions
+//  [OPTIONAL - provided by subgraph]
+// Output:
+//  START_POS - Positions of boxes being tracked (can be overwritten with ID)
+//  [REQUIRED]
+//  CANCEL_ID - Single integer ID of tracking box to remove from tracker
+//  subgraph [OPTIONAL]
+//  ANCHORS - Updated set of anchors with tracked and normalized X,Y,Z
+//  [REQUIRED]
+//
+// Example config:
+// node {
+//   calculator: "TrackedAnchorManagerCalculator"
+//   input_stream: "SENTINEL:sticker_sentinel"
+//   input_stream: "ANCHORS:initial_anchor_data"
+//   input_stream: "BOXES:boxes"
+//   input_stream_info: {
+//     tag_index: 'BOXES'
+//     back_edge: true
+//   }
+//   output_stream: "START_POS:start_pos"
+//   output_stream: "CANCEL_ID:cancel_object_id"
+//   output_stream: "ANCHORS:tracked_scaled_anchor_data"
+// }
+
+class TrackedAnchorManagerCalculator : public CalculatorBase {
+ private:
+  // Anchor data from the previous graph iteration
+  std::vector<Anchor> previous_anchor_data_;
+
+ public:
+  static absl::Status GetContract(CalculatorContract* cc) {
+    RET_CHECK(cc->Inputs().HasTag(kAnchorsTag) &&
+              cc->Inputs().HasTag(kSentinelTag));
+    RET_CHECK(cc->Outputs().HasTag(kAnchorsTag) &&
+              cc->Outputs().HasTag(kBoxesOutputTag));
+
+    cc->Inputs().Tag(kAnchorsTag).Set<std::vector<Anchor>>();
+    cc->Inputs().Tag(kSentinelTag).Set<int>();
+
+    if (cc->Inputs().HasTag(kBoxesInputTag)) {
+      cc->Inputs().Tag(kBoxesInputTag).Set<mediapipe::TimedBoxProtoList>();
+    }
+
+    cc->Outputs().Tag(kAnchorsTag).Set<std::vector<Anchor>>();
+    cc->Outputs().Tag(kBoxesOutputTag).Set<mediapipe::TimedBoxProtoList>();
+
+    if (cc->Outputs().HasTag(kCancelTag)) {
+      cc->Outputs().Tag(kCancelTag).Set<int>();
+    }
+
+    return absl::OkStatus();
+  }
+
+  absl::Status Open(CalculatorContext* cc) override { return absl::OkStatus(); }
+
+  absl::Status Process(CalculatorContext* cc) override;
+};
+REGISTER_CALCULATOR(TrackedAnchorManagerCalculator);
+
+absl::Status TrackedAnchorManagerCalculator::Process(CalculatorContext* cc) {
+  mediapipe::Timestamp timestamp = cc->InputTimestamp();
+  const int sticker_sentinel = cc->Inputs().Tag(kSentinelTag).Get<int>();
+  std::vector<Anchor> current_anchor_data =
+      cc->Inputs().Tag(kAnchorsTag).Get<std::vector<Anchor>>();
+  auto pos_boxes = absl::make_unique<mediapipe::TimedBoxProtoList>();
+  std::vector<Anchor> tracked_scaled_anchor_data;
+
+  // Delete any boxes being tracked without an associated anchor
+  for (const mediapipe::TimedBoxProto& box :
+       cc->Inputs()
+           .Tag(kBoxesInputTag)
+           .Get<mediapipe::TimedBoxProtoList>()
+           .box()) {
+    bool anchor_exists = false;
+    for (Anchor anchor : current_anchor_data) {
+      if (box.id() == anchor.sticker_id) {
+        anchor_exists = true;
+        break;
+      }
+    }
+    if (!anchor_exists) {
+      cc->Outputs()
+          .Tag(kCancelTag)
+          .AddPacket(MakePacket<int>(box.id()).At(timestamp++));
+    }
+  }
+
+  // Perform tracking or updating for each anchor position
+  for (const Anchor& anchor : current_anchor_data) {
+    Anchor output_anchor = anchor;
+    // Check if the anchor position is being reset by the user in this graph
+    // iteration
+    if (sticker_sentinel == anchor.sticker_id) {
+      // Delete the associated tracking box
+      // TODO: BoxTrackingSubgraph should accept vector to avoid breaking
+      // timestamp rules
+      cc->Outputs()
+          .Tag(kCancelTag)
+          .AddPacket(MakePacket<int>(anchor.sticker_id).At(timestamp++));
+      // Add a tracking box
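+      // (e.g., with kBoxEdgeSize = 0.2, an anchor reset at x = 0.5, y = 0.5
+      // yields a normalized box spanning [0.4, 0.6] on both axes)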
+      mediapipe::TimedBoxProto* box = pos_boxes->add_box();
+      box->set_left(anchor.x - kBoxEdgeSize * 0.5f);
+      box->set_right(anchor.x + kBoxEdgeSize * 0.5f);
+      box->set_top(anchor.y - kBoxEdgeSize * 0.5f);
+      box->set_bottom(anchor.y + kBoxEdgeSize * 0.5f);
+      box->set_id(anchor.sticker_id);
+      box->set_time_msec((timestamp++).Microseconds() / kUsToMs);
+      // Default value for normalized z (scale factor)
+      output_anchor.z = 1.0f;
+    } else {
+      // The anchor position was not reset by the user.
+      // Attempt to update the anchor position from the tracking subgraph
+      // (TimedBoxProto)
+      bool updated_from_tracker = false;
+      const mediapipe::TimedBoxProtoList box_list =
+          cc->Inputs().Tag(kBoxesInputTag).Get<mediapipe::TimedBoxProtoList>();
+      for (const auto& box : box_list.box()) {
+        if (box.id() == anchor.sticker_id) {
+          // Get center x normalized coordinate [0.0-1.0]
+          output_anchor.x = (box.left() + box.right()) * 0.5f;
+          // Get center y normalized coordinate [0.0-1.0]
+          output_anchor.y = (box.top() + box.bottom()) * 0.5f;
+          // Get center z coordinate [z starts at normalized 1.0 and scales
+          // inversely with box width]
+          // TODO: Look into issues with uniform scaling on x-axis and y-axis
+          output_anchor.z = kBoxEdgeSize / (box.right() - box.left());
+          updated_from_tracker = true;
+          break;
+        }
+      }
+      // If the anchor position was not updated from the tracker, create a new
+      // tracking box at the last recorded anchor coordinates. This allows all
+      // current stickers to be tracked at approximately their last location
+      // even if re-acquisition in the BoxTrackingSubgraph encounters errors
+      if (!updated_from_tracker) {
+        for (const Anchor& prev_anchor : previous_anchor_data_) {
+          if (anchor.sticker_id == prev_anchor.sticker_id) {
+            mediapipe::TimedBoxProto* box = pos_boxes->add_box();
+            box->set_left(prev_anchor.x - kBoxEdgeSize * 0.5f);
+            box->set_right(prev_anchor.x + kBoxEdgeSize * 0.5f);
+            box->set_top(prev_anchor.y - kBoxEdgeSize * 0.5f);
+            box->set_bottom(prev_anchor.y + kBoxEdgeSize * 0.5f);
+            box->set_id(prev_anchor.sticker_id);
+            box->set_time_msec(cc->InputTimestamp().Microseconds() / kUsToMs);
+            output_anchor = prev_anchor;
+            // Default value for normalized z (scale factor)
+            output_anchor.z = 1.0f;
+            break;
+          }
+        }
+      }
+    }
+    tracked_scaled_anchor_data.emplace_back(output_anchor);
+  }
+  // Set anchor data for the next iteration
+  previous_anchor_data_ = tracked_scaled_anchor_data;
+
+  cc->Outputs()
+      .Tag(kAnchorsTag)
+      .AddPacket(MakePacket<std::vector<Anchor>>(tracked_scaled_anchor_data)
+                     .At(cc->InputTimestamp()));
+  cc->Outputs()
+      .Tag(kBoxesOutputTag)
+      .Add(pos_boxes.release(), cc->InputTimestamp());
+
+  return absl::OkStatus();
+}
+}  // namespace mediapipe
diff --git a/mediapipe/graphs/instant_motion_tracking/calculators/transformations.h b/mediapipe/graphs/instant_motion_tracking/calculators/transformations.h
new file mode 100644
index 0000000..cbacdb7
--- /dev/null
+++ b/mediapipe/graphs/instant_motion_tracking/calculators/transformations.h
@@ -0,0 +1,42 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
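+
+// Note: Anchor::z starts at 1.0 and is later derived from the tracked box
+// width (see tracked_anchor_manager_calculator.cc), so a tracked region that
+// shrinks to half its initial width doubles z, pushing the sticker deeper
+// into the OpenGL scene so it renders smaller, mimicking distance.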
+ +#ifndef MEDIAPIPE_GRAPHS_INSTANT_MOTION_TRACKING_CALCULATORS_TRANSFORMATIONS_H_ +#define MEDIAPIPE_GRAPHS_INSTANT_MOTION_TRACKING_CALCULATORS_TRANSFORMATIONS_H_ + +namespace mediapipe { + +// Radians by which to rotate the object (Provided by UI input) +struct UserRotation { + float rotation_radians; + int sticker_id; +}; + +// Scaling factor provided by the UI application end +struct UserScaling { + float scale_factor; + int sticker_id; +}; + +// The normalized anchor coordinates of a sticker +struct Anchor { + float x; // [0.0-1.0] + float y; // [0.0-1.0] + float z; // Centered around 1.0 [current_scale = z * initial_scale] + int sticker_id; +}; + +} // namespace mediapipe + +#endif // MEDIAPIPE_GRAPHS_INSTANT_MOTION_TRACKING_CALCULATORS_TRANSFORMATIONS_H_ diff --git a/mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt b/mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt new file mode 100644 index 0000000..468262b --- /dev/null +++ b/mediapipe/graphs/instant_motion_tracking/instant_motion_tracking.pbtxt @@ -0,0 +1,80 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# MediaPipe graph that performs region tracking and 3d object (AR sticker) rendering. + +# Images in/out of graph with sticker data and IMU information from device +input_stream: "input_video" +input_stream: "sticker_sentinel" +input_stream: "sticker_proto_string" +input_stream: "imu_rotation_matrix" +input_stream: "gif_texture" +input_stream: "gif_aspect_ratio" +output_stream: "output_video" + +# Converts sticker data into user data (rotations/scalings), render data, and +# initial anchors. +node { + calculator: "StickerManagerCalculator" + input_stream: "PROTO:sticker_proto_string" + output_stream: "ANCHORS:initial_anchor_data" + output_stream: "USER_ROTATIONS:user_rotation_data" + output_stream: "USER_SCALINGS:user_scaling_data" + output_stream: "RENDER_DATA:sticker_render_data" +} + +# Uses box tracking in order to create 'anchors' for associated 3d stickers. +node { + calculator: "RegionTrackingSubgraph" + input_stream: "VIDEO:input_video" + input_stream: "SENTINEL:sticker_sentinel" + input_stream: "ANCHORS:initial_anchor_data" + output_stream: "ANCHORS:tracked_anchor_data" +} + +# Concatenates all transformations to generate model matrices for the OpenGL +# animation overlay calculator. +node { + calculator: "MatricesManagerCalculator" + input_stream: "ANCHORS:tracked_anchor_data" + input_stream: "IMU_ROTATION:imu_rotation_matrix" + input_stream: "USER_ROTATIONS:user_rotation_data" + input_stream: "USER_SCALINGS:user_scaling_data" + input_stream: "RENDER_DATA:sticker_render_data" + input_stream: "GIF_ASPECT_RATIO:gif_aspect_ratio" + output_stream: "MATRICES:0:gif_matrices" + output_stream: "MATRICES:1:asset_3d_matrices" + input_side_packet: "FOV:vertical_fov_radians" + input_side_packet: "ASPECT_RATIO:aspect_ratio" +} + +# Renders the final 3d stickers and overlays them on input image. 
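+# (The GIF pass below renders first; its output stream feeds the 3d-asset
+# pass that follows, so both sticker types are composited into output_video.)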
+node { + calculator: "GlAnimationOverlayCalculator" + input_stream: "VIDEO:input_video" + input_stream: "MODEL_MATRICES:gif_matrices" + input_stream: "TEXTURE:gif_texture" + input_side_packet: "ANIMATION_ASSET:gif_asset_name" + output_stream: "asset_gif_rendered" +} + +# Renders the final 3d stickers and overlays them on top of the input image. +node { + calculator: "GlAnimationOverlayCalculator" + input_stream: "VIDEO:asset_gif_rendered" + input_stream: "MODEL_MATRICES:asset_3d_matrices" + input_side_packet: "TEXTURE:texture_3d" + input_side_packet: "ANIMATION_ASSET:asset_3d" + output_stream: "output_video" +} diff --git a/mediapipe/graphs/instant_motion_tracking/subgraphs/BUILD b/mediapipe/graphs/instant_motion_tracking/subgraphs/BUILD new file mode 100644 index 0000000..cd1561b --- /dev/null +++ b/mediapipe/graphs/instant_motion_tracking/subgraphs/BUILD @@ -0,0 +1,32 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "region_tracking", + graph = "region_tracking.pbtxt", + register_as = "RegionTrackingSubgraph", + deps = [ + "//mediapipe/graphs/instant_motion_tracking/calculators:tracked_anchor_manager_calculator", + "//mediapipe/graphs/tracking/subgraphs:box_tracking_gpu", + ], +) diff --git a/mediapipe/graphs/instant_motion_tracking/subgraphs/region_tracking.pbtxt b/mediapipe/graphs/instant_motion_tracking/subgraphs/region_tracking.pbtxt new file mode 100644 index 0000000..f8ef3ad --- /dev/null +++ b/mediapipe/graphs/instant_motion_tracking/subgraphs/region_tracking.pbtxt @@ -0,0 +1,47 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# MediaPipe graph that performs region tracking on initial anchor positions +# provided by the application + +# Images in/out of graph with tracked and scaled normalized anchor data +type: "RegionTrackingSubgraph" +input_stream: "VIDEO:input_video" +input_stream: "SENTINEL:sticker_sentinel" +input_stream: "ANCHORS:initial_anchor_data" +output_stream: "ANCHORS:tracked_scaled_anchor_data" + +# Manages the anchors and tracking if user changes/adds/deletes anchors +node { + calculator: "TrackedAnchorManagerCalculator" + input_stream: "SENTINEL:sticker_sentinel" + input_stream: "ANCHORS:initial_anchor_data" + input_stream: "BOXES:boxes" + input_stream_info: { + tag_index: 'BOXES' + back_edge: true + } + output_stream: "START_POS:start_pos" + output_stream: "CANCEL_ID:cancel_object_id" + output_stream: "ANCHORS:tracked_scaled_anchor_data" +} + +# Subgraph performs anchor placement and tracking +node { + calculator: "BoxTrackingSubgraphGpu" + input_stream: "VIDEO:input_video" + input_stream: "BOXES:start_pos" + input_stream: "CANCEL_ID:cancel_object_id" + output_stream: "BOXES:boxes" +} diff --git a/mediapipe/graphs/iris_tracking/BUILD b/mediapipe/graphs/iris_tracking/BUILD new file mode 100644 index 0000000..86e667b --- /dev/null +++ b/mediapipe/graphs/iris_tracking/BUILD @@ -0,0 +1,86 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "iris_depth_cpu_deps", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_file_properties_calculator", + "//mediapipe/calculators/image:opencv_encoded_image_to_image_frame_calculator", + "//mediapipe/calculators/image:opencv_image_encoder_calculator", + "//mediapipe/graphs/iris_tracking/calculators:update_face_landmarks_calculator", + "//mediapipe/graphs/iris_tracking/subgraphs:iris_and_depth_renderer_cpu", + "//mediapipe/modules/face_landmark:face_landmark_front_cpu", + "//mediapipe/modules/iris_landmark:iris_landmark_left_and_right_cpu", + ], +) + +cc_library( + name = "iris_tracking_cpu_deps", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/graphs/iris_tracking/calculators:update_face_landmarks_calculator", + "//mediapipe/graphs/iris_tracking/subgraphs:iris_renderer_cpu", + "//mediapipe/modules/face_landmark:face_landmark_front_cpu", + "//mediapipe/modules/iris_landmark:iris_landmark_left_and_right_cpu", + ], +) + +cc_library( + name = "iris_tracking_cpu_video_input_deps", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + "//mediapipe/calculators/video:opencv_video_encoder_calculator", + "//mediapipe/graphs/iris_tracking/calculators:update_face_landmarks_calculator", + "//mediapipe/graphs/iris_tracking/subgraphs:iris_renderer_cpu", + "//mediapipe/modules/face_landmark:face_landmark_front_cpu", + "//mediapipe/modules/iris_landmark:iris_landmark_left_and_right_cpu", + ], +) + +cc_library( + name = "iris_tracking_gpu_deps", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/graphs/iris_tracking/calculators:update_face_landmarks_calculator", + "//mediapipe/graphs/iris_tracking/subgraphs:iris_and_depth_renderer_gpu", + "//mediapipe/modules/face_landmark:face_landmark_front_gpu", + "//mediapipe/modules/iris_landmark:iris_landmark_left_and_right_gpu", + ], +) + +mediapipe_binary_graph( + name = "iris_tracking_gpu_binary_graph", + graph = "iris_tracking_gpu.pbtxt", + output_name = "iris_tracking_gpu.binarypb", + deps = [":iris_tracking_gpu_deps"], +) diff --git a/mediapipe/graphs/iris_tracking/calculators/BUILD b/mediapipe/graphs/iris_tracking/calculators/BUILD new file mode 100644 index 0000000..3a3d57a --- /dev/null +++ b/mediapipe/graphs/iris_tracking/calculators/BUILD @@ -0,0 +1,107 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library") + +licenses(["notice"]) + +proto_library( + name = "iris_to_render_data_calculator_proto", + srcs = ["iris_to_render_data_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_proto", + "//mediapipe/util:color_proto", + "//mediapipe/util:render_data_proto", + ], +) + +mediapipe_cc_proto_library( + name = "iris_to_render_data_calculator_cc_proto", + srcs = ["iris_to_render_data_calculator.proto"], + cc_deps = [ + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/util:color_cc_proto", + "//mediapipe/util:render_data_cc_proto", + ], + visibility = ["//visibility:public"], + deps = [":iris_to_render_data_calculator_proto"], +) + +cc_library( + name = "iris_to_render_data_calculator", + srcs = ["iris_to_render_data_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":iris_to_render_data_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/util:color_cc_proto", + "//mediapipe/util:render_data_cc_proto", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) + +proto_library( + name = "iris_to_depth_calculator_proto", + srcs = ["iris_to_depth_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_cc_proto_library( + name = "iris_to_depth_calculator_cc_proto", + srcs = ["iris_to_depth_calculator.proto"], + cc_deps = [ + "//mediapipe/framework:calculator_cc_proto", + ], + visibility = ["//visibility:public"], + deps = [":iris_to_depth_calculator_proto"], +) + +cc_library( + name = "iris_to_depth_calculator", + srcs = ["iris_to_depth_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":iris_to_depth_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:image_file_properties_cc_proto", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) + +cc_library( + name = "update_face_landmarks_calculator", + srcs = ["update_face_landmarks_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:image_file_properties_cc_proto", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) diff --git a/mediapipe/graphs/iris_tracking/calculators/iris_to_depth_calculator.cc b/mediapipe/graphs/iris_tracking/calculators/iris_to_depth_calculator.cc new file mode 100644 index 0000000..3522274 --- /dev/null +++ b/mediapipe/graphs/iris_tracking/calculators/iris_to_depth_calculator.cc @@ -0,0 +1,245 @@ +// Copyright 2020 The MediaPipe Authors. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <cmath>
+#include <utility>
+
+#include "absl/strings/str_cat.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/image_file_properties.pb.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/graphs/iris_tracking/calculators/iris_to_depth_calculator.pb.h"
+
+namespace mediapipe {
+
+namespace {
+
+constexpr char kIrisTag[] = "IRIS";
+constexpr char kImageSizeTag[] = "IMAGE_SIZE";
+constexpr char kFocalLengthPixelTag[] = "FOCAL_LENGTH";
+constexpr char kImageFilePropertiesTag[] = "IMAGE_FILE_PROPERTIES";
+constexpr char kLeftIrisDepthTag[] = "LEFT_IRIS_DEPTH_MM";
+constexpr char kRightIrisDepthTag[] = "RIGHT_IRIS_DEPTH_MM";
+constexpr int kNumIrisLandmarksPerEye = 5;
+constexpr float kDepthWeightUpdate = 0.1;
+// Average fixed iris size across human beings.
+constexpr float kIrisSizeInMM = 11.8;
+
+inline float GetDepth(float x0, float y0, float x1, float y1) {
+  return std::sqrt((x0 - x1) * (x0 - x1) + (y0 - y1) * (y0 - y1));
+}
+
+inline float GetLandmarkDepth(const NormalizedLandmark& ld0,
+                              const NormalizedLandmark& ld1,
+                              const std::pair<int, int>& image_size) {
+  return GetDepth(ld0.x() * image_size.first, ld0.y() * image_size.second,
+                  ld1.x() * image_size.first, ld1.y() * image_size.second);
+}
+
+float CalculateIrisDiameter(const NormalizedLandmarkList& landmarks,
+                            const std::pair<int, int>& image_size) {
+  const float dist_vert = GetLandmarkDepth(landmarks.landmark(1),
+                                           landmarks.landmark(2), image_size);
+  const float dist_hori = GetLandmarkDepth(landmarks.landmark(3),
+                                           landmarks.landmark(4), image_size);
+  return (dist_hori + dist_vert) / 2.0f;
+}
+
+float CalculateDepth(const NormalizedLandmark& center, float focal_length,
+                     float iris_size, float img_w, float img_h) {
+  std::pair<float, float> origin{img_w / 2.f, img_h / 2.f};
+  const auto y = GetDepth(origin.first, origin.second, center.x() * img_w,
+                          center.y() * img_h);
+  const auto x = std::sqrt(focal_length * focal_length + y * y);
+  const auto depth = kIrisSizeInMM * x / iris_size;
+  return depth;
+}
+
+}  // namespace
+
+// Estimates depth from iris to camera given focal length and image size.
+//
+// Usage example:
+// node {
+//   calculator: "IrisToDepthCalculator"
+//   # A NormalizedLandmarkList contains landmarks for both irises.
+//   input_stream: "IRIS:iris_landmarks"
+//   input_stream: "IMAGE_SIZE:image_size"
+//   # Note: Only one of FOCAL_LENGTH or IMAGE_FILE_PROPERTIES is necessary
+//   # to get focal length in pixels. Sending focal length in pixels to
+//   # this calculator is optional.
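+//   # (Illustrative numbers: with a 1400 px focal length and a detected
+//   # iris diameter of 55 px at the image center, the estimated depth is
+//   # 11.8 mm * 1400 / 55, or about 300 mm.)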
+//   input_side_packet: "FOCAL_LENGTH:focal_length_pixel"
+//   # OR
+//   input_side_packet: "IMAGE_FILE_PROPERTIES:image_file_properties"
+//   output_stream: "LEFT_IRIS_DEPTH_MM:left_iris_depth_mm"
+//   output_stream: "RIGHT_IRIS_DEPTH_MM:right_iris_depth_mm"
+// }
+class IrisToDepthCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc) {
+    cc->Inputs().Tag(kIrisTag).Set<NormalizedLandmarkList>();
+    cc->Inputs().Tag(kImageSizeTag).Set<std::pair<int, int>>();
+
+    // At most one of kFocalLengthPixelTag and kImageFilePropertiesTag may be
+    // present.
+    RET_CHECK(!(cc->InputSidePackets().HasTag(kFocalLengthPixelTag) &&
+                cc->InputSidePackets().HasTag(kImageFilePropertiesTag)));
+    if (cc->InputSidePackets().HasTag(kFocalLengthPixelTag)) {
+      cc->InputSidePackets().Tag(kFocalLengthPixelTag).SetAny();
+    }
+    if (cc->InputSidePackets().HasTag(kImageFilePropertiesTag)) {
+      cc->InputSidePackets()
+          .Tag(kImageFilePropertiesTag)
+          .Set<ImageFileProperties>();
+    }
+    if (cc->Outputs().HasTag(kLeftIrisDepthTag)) {
+      cc->Outputs().Tag(kLeftIrisDepthTag).Set<float>();
+    }
+    if (cc->Outputs().HasTag(kRightIrisDepthTag)) {
+      cc->Outputs().Tag(kRightIrisDepthTag).Set<float>();
+    }
+    return absl::OkStatus();
+  }
+
+  absl::Status Open(CalculatorContext* cc) override;
+
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  float focal_length_pixels_ = -1.f;
+  // TODO: Consolidate the logic when switching to input stream for
+  // focal length.
+  bool compute_depth_from_iris_ = false;
+  float smoothed_left_depth_mm_ = -1.f;
+  float smoothed_right_depth_mm_ = -1.f;
+
+  void GetLeftIris(const NormalizedLandmarkList& lds,
+                   NormalizedLandmarkList* iris);
+  void GetRightIris(const NormalizedLandmarkList& lds,
+                    NormalizedLandmarkList* iris);
+  ::mediapipe::IrisToDepthCalculatorOptions options_;
+};
+REGISTER_CALCULATOR(IrisToDepthCalculator);
+
+absl::Status IrisToDepthCalculator::Open(CalculatorContext* cc) {
+  cc->SetOffset(TimestampDiff(0));
+  if (cc->InputSidePackets().HasTag(kFocalLengthPixelTag)) {
+#if defined(__APPLE__)
+    focal_length_pixels_ = *cc->InputSidePackets()
+                                .Tag(kFocalLengthPixelTag)
+                                .Get<std::unique_ptr<float>>();
+#else
+    focal_length_pixels_ =
+        cc->InputSidePackets().Tag(kFocalLengthPixelTag).Get<float>();
+#endif
+    compute_depth_from_iris_ = true;
+  } else if (cc->InputSidePackets().HasTag(kImageFilePropertiesTag)) {
+    const auto& properties = cc->InputSidePackets()
+                                 .Tag(kImageFilePropertiesTag)
+                                 .Get<ImageFileProperties>();
+    focal_length_pixels_ = properties.focal_length_pixels();
+    compute_depth_from_iris_ = true;
+  }
+
+  options_ = cc->Options<::mediapipe::IrisToDepthCalculatorOptions>();
+  return absl::OkStatus();
+}
+
+absl::Status IrisToDepthCalculator::Process(CalculatorContext* cc) {
+  // Only process if there are input landmarks.
+  if (cc->Inputs().Tag(kIrisTag).IsEmpty()) {
+    return absl::OkStatus();
+  }
+
+  const auto& iris_landmarks =
+      cc->Inputs().Tag(kIrisTag).Get<NormalizedLandmarkList>();
+  RET_CHECK_EQ(iris_landmarks.landmark_size(), kNumIrisLandmarksPerEye * 2)
+      << "Wrong number of iris landmarks";
+
+  std::pair<int, int> image_size;
+  RET_CHECK(!cc->Inputs().Tag(kImageSizeTag).IsEmpty());
+  image_size = cc->Inputs().Tag(kImageSizeTag).Get<std::pair<int, int>>();
+
+  auto left_iris = absl::make_unique<NormalizedLandmarkList>();
+  auto right_iris = absl::make_unique<NormalizedLandmarkList>();
+  GetLeftIris(iris_landmarks, left_iris.get());
+  GetRightIris(iris_landmarks, right_iris.get());
+
+  const auto left_iris_size = CalculateIrisDiameter(*left_iris, image_size);
+  const auto right_iris_size = CalculateIrisDiameter(*right_iris, image_size);
+
+#if defined(__APPLE__)
+  if (cc->InputSidePackets().HasTag(kFocalLengthPixelTag)) {
+    focal_length_pixels_ = *cc->InputSidePackets()
+                                .Tag(kFocalLengthPixelTag)
+                                .Get<std::unique_ptr<float>>();
+  }
+#endif
+
+  if (compute_depth_from_iris_ && focal_length_pixels_ > 0) {
+    const auto left_depth =
+        CalculateDepth(left_iris->landmark(0), focal_length_pixels_,
+                       left_iris_size, image_size.first, image_size.second);
+    const auto right_depth =
+        CalculateDepth(right_iris->landmark(0), focal_length_pixels_,
+                       right_iris_size, image_size.first, image_size.second);
+    smoothed_left_depth_mm_ =
+        smoothed_left_depth_mm_ < 0 || std::isinf(smoothed_left_depth_mm_)
+            ? left_depth
+            : smoothed_left_depth_mm_ * (1 - kDepthWeightUpdate) +
+                  left_depth * kDepthWeightUpdate;
+    smoothed_right_depth_mm_ =
+        smoothed_right_depth_mm_ < 0 || std::isinf(smoothed_right_depth_mm_)
+            ? right_depth
+            : smoothed_right_depth_mm_ * (1 - kDepthWeightUpdate) +
+                  right_depth * kDepthWeightUpdate;
+
+    if (cc->Outputs().HasTag(kLeftIrisDepthTag)) {
+      cc->Outputs()
+          .Tag(kLeftIrisDepthTag)
+          .AddPacket(MakePacket<float>(smoothed_left_depth_mm_)
+                         .At(cc->InputTimestamp()));
+    }
+    if (cc->Outputs().HasTag(kRightIrisDepthTag)) {
+      cc->Outputs()
+          .Tag(kRightIrisDepthTag)
+          .AddPacket(MakePacket<float>(smoothed_right_depth_mm_)
+                         .At(cc->InputTimestamp()));
+    }
+  }
+  return absl::OkStatus();
+}
+
+void IrisToDepthCalculator::GetLeftIris(const NormalizedLandmarkList& lds,
+                                        NormalizedLandmarkList* iris) {
+  // Center, top, bottom, left, right
+  *iris->add_landmark() = lds.landmark(options_.left_iris_center_index());
+  *iris->add_landmark() = lds.landmark(options_.left_iris_top_index());
+  *iris->add_landmark() = lds.landmark(options_.left_iris_bottom_index());
+  *iris->add_landmark() = lds.landmark(options_.left_iris_left_index());
+  *iris->add_landmark() = lds.landmark(options_.left_iris_right_index());
+}
+
+void IrisToDepthCalculator::GetRightIris(const NormalizedLandmarkList& lds,
+                                         NormalizedLandmarkList* iris) {
+  // Center, top, bottom, left, right
+  *iris->add_landmark() = lds.landmark(options_.right_iris_center_index());
+  *iris->add_landmark() = lds.landmark(options_.right_iris_top_index());
+  *iris->add_landmark() = lds.landmark(options_.right_iris_bottom_index());
+  *iris->add_landmark() = lds.landmark(options_.right_iris_left_index());
+  *iris->add_landmark() = lds.landmark(options_.right_iris_right_index());
+}
+}  // namespace mediapipe
diff --git a/mediapipe/graphs/iris_tracking/calculators/iris_to_depth_calculator.proto b/mediapipe/graphs/iris_tracking/calculators/iris_to_depth_calculator.proto
new file mode 100644
index 0000000..786cd30
--- /dev/null
+++ b/mediapipe/graphs/iris_tracking/calculators/iris_to_depth_calculator.proto
@@ -0,0 +1,39 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+
+message IrisToDepthCalculatorOptions {
+  extend CalculatorOptions {
+    optional IrisToDepthCalculatorOptions ext = 303429002;
+  }
+
+  // Indices of corresponding left iris landmarks in input stream.
+  optional int32 left_iris_center_index = 1 [default = 0];
+  optional int32 left_iris_top_index = 2 [default = 2];
+  optional int32 left_iris_bottom_index = 3 [default = 4];
+  optional int32 left_iris_left_index = 4 [default = 3];
+  optional int32 left_iris_right_index = 5 [default = 1];
+
+  // Indices of corresponding right iris landmarks in input stream.
+  optional int32 right_iris_center_index = 6 [default = 5];
+  optional int32 right_iris_top_index = 7 [default = 7];
+  optional int32 right_iris_bottom_index = 8 [default = 9];
+  optional int32 right_iris_left_index = 9 [default = 6];
+  optional int32 right_iris_right_index = 10 [default = 8];
+}
diff --git a/mediapipe/graphs/iris_tracking/calculators/iris_to_render_data_calculator.cc b/mediapipe/graphs/iris_tracking/calculators/iris_to_render_data_calculator.cc
new file mode 100644
index 0000000..c19db2a
--- /dev/null
+++ b/mediapipe/graphs/iris_tracking/calculators/iris_to_render_data_calculator.cc
@@ -0,0 +1,318 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <cmath>
+#include <memory>
+
+#include "absl/strings/str_cat.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/graphs/iris_tracking/calculators/iris_to_render_data_calculator.pb.h"
+#include "mediapipe/util/color.pb.h"
+#include "mediapipe/util/render_data.pb.h"
+
+namespace mediapipe {
+
+namespace {
+
+constexpr char kIrisTag[] = "IRIS";
+constexpr char kRenderDataTag[] = "RENDER_DATA";
+constexpr char kImageSizeTag[] = "IMAGE_SIZE";
+constexpr char kLeftIrisDepthTag[] = "LEFT_IRIS_DEPTH_MM";
+constexpr char kRightIrisDepthTag[] = "RIGHT_IRIS_DEPTH_MM";
+constexpr char kOvalLabel[] = "OVAL";
+constexpr float kFontHeightScale = 1.5f;
+constexpr int kNumIrisLandmarksPerEye = 5;
+// TODO: Source.
+constexpr float kIrisSizeInMM = 11.8;
+
+inline void SetColor(RenderAnnotation* annotation, const Color& color) {
+  annotation->mutable_color()->set_r(color.r());
+  annotation->mutable_color()->set_g(color.g());
+  annotation->mutable_color()->set_b(color.b());
+}
+
+inline float GetDepth(float x0, float y0, float x1, float y1) {
+  return std::sqrt((x0 - x1) * (x0 - x1) + (y0 - y1) * (y0 - y1));
+}
+
+inline float GetLandmarkDepth(const NormalizedLandmark& ld0,
+                              const NormalizedLandmark& ld1,
+                              const std::pair<int, int>& image_size) {
+  return GetDepth(ld0.x() * image_size.first, ld0.y() * image_size.second,
+                  ld1.x() * image_size.first, ld1.y() * image_size.second);
+}
+
+float CalculateIrisDiameter(const NormalizedLandmarkList& landmarks,
+                            const std::pair<int, int>& image_size) {
+  const float dist_vert = GetLandmarkDepth(landmarks.landmark(1),
+                                           landmarks.landmark(2), image_size);
+  const float dist_hori = GetLandmarkDepth(landmarks.landmark(3),
+                                           landmarks.landmark(4), image_size);
+  return (dist_hori + dist_vert) / 2.0f;
+}
+
+float CalculateDepth(const NormalizedLandmark& center, float focal_length,
+                     float iris_size, float img_w, float img_h) {
+  std::pair<float, float> origin{img_w / 2.f, img_h / 2.f};
+  const auto y = GetDepth(origin.first, origin.second, center.x() * img_w,
+                          center.y() * img_h);
+  const auto x = std::sqrt(focal_length * focal_length + y * y);
+  const auto depth = kIrisSizeInMM * x / iris_size;
+  return depth;
+}
+
+}  // namespace
+
+// Converts iris landmarks to render data and estimates depth from the camera
+// if focal length and image size are given. The depth will be rendered as part
+// of the render data on the frame.
+//
+// Usage example:
+// node {
+//   calculator: "IrisToRenderDataCalculator"
+//   input_stream: "IRIS:iris_landmarks"
+//   input_stream: "IMAGE_SIZE:image_size"
+//   # Note: Only one of FOCAL_LENGTH or IMAGE_FILE_PROPERTIES is necessary
+//   # to get focal length in pixels. Sending focal length in pixels to
+//   # this calculator is optional.
+//   input_side_packet: "FOCAL_LENGTH:focal_length_pixel"
+//   # OR
+//   input_side_packet: "IMAGE_FILE_PROPERTIES:image_file_properties"
+//   output_stream: "RENDER_DATA:iris_render_data"
+//   output_stream: "LEFT_IRIS_DEPTH_MM:left_iris_depth_mm"
+//   output_stream: "RIGHT_IRIS_DEPTH_MM:right_iris_depth_mm"
+//   node_options: {
+//     [type.googleapis.com/mediapipe.IrisToRenderDataCalculatorOptions] {
+//       color { r: 255 g: 255 b: 255 }
+//       thickness: 2.0
+//       font_height_px: 50
+//       horizontal_offset_px: 200
+//       vertical_offset_px: 200
+//       location: TOP_LEFT
+//     }
+//   }
+// }
+class IrisToRenderDataCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc) {
+    cc->Inputs().Tag(kIrisTag).Set<NormalizedLandmarkList>();
+    cc->Outputs().Tag(kRenderDataTag).Set<RenderData>();
+    cc->Inputs().Tag(kImageSizeTag).Set<std::pair<int, int>>();
+
+    if (cc->Inputs().HasTag(kLeftIrisDepthTag)) {
+      cc->Inputs().Tag(kLeftIrisDepthTag).Set<float>();
+    }
+    if (cc->Inputs().HasTag(kRightIrisDepthTag)) {
+      cc->Inputs().Tag(kRightIrisDepthTag).Set<float>();
+    }
+    return absl::OkStatus();
+  }
+
+  absl::Status Open(CalculatorContext* cc) override;
+
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  void RenderIris(const NormalizedLandmarkList& iris_landmarks,
+                  const IrisToRenderDataCalculatorOptions& options,
+                  const std::pair<int, int>& image_size, float iris_size,
+                  RenderData* render_data);
+  void GetLeftIris(const NormalizedLandmarkList& lds,
+                   NormalizedLandmarkList* iris);
+  void GetRightIris(const NormalizedLandmarkList& lds,
+                    NormalizedLandmarkList* iris);
+
+  void AddTextRenderData(const IrisToRenderDataCalculatorOptions& options,
+                         const std::pair<int, int>& image_size,
+                         const std::vector<std::string>& lines,
+                         RenderData* render_data);
+
+  static RenderAnnotation* AddOvalRenderData(
+      const IrisToRenderDataCalculatorOptions& options,
+      RenderData* render_data);
+  static RenderAnnotation* AddPointRenderData(
+      const IrisToRenderDataCalculatorOptions& options,
+      RenderData* render_data);
+};
+REGISTER_CALCULATOR(IrisToRenderDataCalculator);
+
+absl::Status IrisToRenderDataCalculator::Open(CalculatorContext* cc) {
+  cc->SetOffset(TimestampDiff(0));
+  return absl::OkStatus();
+}
+
+absl::Status IrisToRenderDataCalculator::Process(CalculatorContext* cc) {
+  // Only process if there are input landmarks.
+  if (cc->Inputs().Tag(kIrisTag).IsEmpty()) {
+    return absl::OkStatus();
+  }
+  const auto& options =
+      cc->Options<::mediapipe::IrisToRenderDataCalculatorOptions>();
+
+  const auto& iris_landmarks =
+      cc->Inputs().Tag(kIrisTag).Get<NormalizedLandmarkList>();
+  RET_CHECK_EQ(iris_landmarks.landmark_size(), kNumIrisLandmarksPerEye * 2)
+      << "Wrong number of iris landmarks";
+
+  std::pair<int, int> image_size;
+  RET_CHECK(!cc->Inputs().Tag(kImageSizeTag).IsEmpty());
+  image_size = cc->Inputs().Tag(kImageSizeTag).Get<std::pair<int, int>>();
+
+  auto render_data = absl::make_unique<RenderData>();
+  auto left_iris = absl::make_unique<NormalizedLandmarkList>();
+  auto right_iris = absl::make_unique<NormalizedLandmarkList>();
+  GetLeftIris(iris_landmarks, left_iris.get());
+  GetRightIris(iris_landmarks, right_iris.get());
+
+  const auto left_iris_size = CalculateIrisDiameter(*left_iris, image_size);
+  const auto right_iris_size = CalculateIrisDiameter(*right_iris, image_size);
+  RenderIris(*left_iris, options, image_size, left_iris_size,
+             render_data.get());
+  RenderIris(*right_iris, options, image_size, right_iris_size,
+             render_data.get());
+
+  std::vector<std::string> lines;
+  std::string line;
+  if (cc->Inputs().HasTag(kLeftIrisDepthTag) &&
+      !cc->Inputs().Tag(kLeftIrisDepthTag).IsEmpty()) {
+    const float left_iris_depth =
+        cc->Inputs().Tag(kLeftIrisDepthTag).Get<float>();
+    if (!std::isinf(left_iris_depth)) {
+      line = "Left : ";
+      absl::StrAppend(&line, ":", std::round(left_iris_depth / 10), " cm");
+      lines.emplace_back(line);
+    }
+  }
+  if (cc->Inputs().HasTag(kRightIrisDepthTag) &&
+      !cc->Inputs().Tag(kRightIrisDepthTag).IsEmpty()) {
+    const float right_iris_depth =
+        cc->Inputs().Tag(kRightIrisDepthTag).Get<float>();
+    if (!std::isinf(right_iris_depth)) {
+      line = "Right : ";
+      absl::StrAppend(&line, ":", std::round(right_iris_depth / 10), " cm");
+      lines.emplace_back(line);
+    }
+  }
+  AddTextRenderData(options, image_size, lines, render_data.get());
+
+  cc->Outputs()
+      .Tag(kRenderDataTag)
+      .Add(render_data.release(), cc->InputTimestamp());
+  return absl::OkStatus();
+}
+
+void IrisToRenderDataCalculator::AddTextRenderData(
+    const IrisToRenderDataCalculatorOptions& options,
+    const std::pair<int, int>& image_size,
+    const std::vector<std::string>& lines, RenderData* render_data) {
+  int label_baseline_px = options.vertical_offset_px();
+  float label_height_px =
+      std::ceil(options.font_height_px() * kFontHeightScale);
+  if (options.location() == IrisToRenderDataCalculatorOptions::TOP_LEFT) {
+    label_baseline_px += label_height_px;
+  } else if (options.location() ==
+             IrisToRenderDataCalculatorOptions::BOTTOM_LEFT) {
+    label_baseline_px += image_size.second - label_height_px * lines.size();
+  }
+  const auto label_left_px = options.horizontal_offset_px();
+  for (int i = 0; i < lines.size(); ++i) {
+    auto* label_annotation = render_data->add_render_annotations();
+    label_annotation->set_thickness(5);
+
+    label_annotation->mutable_color()->set_r(255);
+    label_annotation->mutable_color()->set_g(0);
+    label_annotation->mutable_color()->set_b(0);
+
+    auto* text = label_annotation->mutable_text();
+    text->set_display_text(lines[i]);
+    text->set_font_height(options.font_height_px());
+    text->set_left(label_left_px);
+    text->set_baseline(label_baseline_px + i * label_height_px);
+    text->set_font_face(options.font_face());
+  }
+}
+
+void IrisToRenderDataCalculator::RenderIris(
+    const NormalizedLandmarkList& iris_landmarks,
+    const IrisToRenderDataCalculatorOptions& options,
+    const std::pair<int, int>& image_size, float iris_size,
+    RenderData* render_data) {
+  auto* oval_data_render = AddOvalRenderData(options, render_data);
+  auto* oval_data = oval_data_render->mutable_oval();
+  const float iris_radius = iris_size / 2.f;
+  const auto& iris_center = iris_landmarks.landmark(0);
+
+  oval_data->mutable_rectangle()->set_top(iris_center.y() -
+                                          iris_radius / image_size.second);
+  oval_data->mutable_rectangle()->set_bottom(iris_center.y() +
+                                             iris_radius / image_size.second);
+  oval_data->mutable_rectangle()->set_left(iris_center.x() -
+                                           iris_radius / image_size.first);
+  oval_data->mutable_rectangle()->set_right(iris_center.x() +
+                                            iris_radius / image_size.first);
+  oval_data->mutable_rectangle()->set_normalized(true);
+
+  for (int i = 0; i < iris_landmarks.landmark_size(); ++i) {
+    const NormalizedLandmark& landmark = iris_landmarks.landmark(i);
+    auto* landmark_data_render = AddPointRenderData(options, render_data);
+    auto* landmark_data = landmark_data_render->mutable_point();
+    landmark_data->set_normalized(true);
+    landmark_data->set_x(landmark.x());
+    landmark_data->set_y(landmark.y());
+  }
+}
+
+void IrisToRenderDataCalculator::GetLeftIris(const NormalizedLandmarkList& lds,
+                                             NormalizedLandmarkList* iris) {
+  // Center, top, bottom, left, right
+  *iris->add_landmark() = lds.landmark(0);
+  *iris->add_landmark() = lds.landmark(2);
+  *iris->add_landmark() = lds.landmark(4);
+  *iris->add_landmark() = lds.landmark(3);
+  *iris->add_landmark() = lds.landmark(1);
+}
+
+void IrisToRenderDataCalculator::GetRightIris(const NormalizedLandmarkList& lds,
+                                              NormalizedLandmarkList* iris) {
+  // Center, top, bottom, left, right
+  *iris->add_landmark() = lds.landmark(5);
+  *iris->add_landmark() = lds.landmark(7);
+  *iris->add_landmark() = lds.landmark(9);
+  *iris->add_landmark() = lds.landmark(6);
+  *iris->add_landmark() = lds.landmark(8);
+}
+
+RenderAnnotation* IrisToRenderDataCalculator::AddOvalRenderData(
+    const IrisToRenderDataCalculatorOptions& options, RenderData* render_data) {
+  auto* oval_data_annotation = render_data->add_render_annotations();
+  oval_data_annotation->set_scene_tag(kOvalLabel);
+
+  SetColor(oval_data_annotation, options.oval_color());
+  oval_data_annotation->set_thickness(options.oval_thickness());
+  return oval_data_annotation;
+}
+
+RenderAnnotation* IrisToRenderDataCalculator::AddPointRenderData(
+    const IrisToRenderDataCalculatorOptions& options, RenderData* render_data) {
+  auto* landmark_data_annotation = render_data->add_render_annotations();
+  SetColor(landmark_data_annotation, options.landmark_color());
+  landmark_data_annotation->set_thickness(options.landmark_thickness());
+
+  return landmark_data_annotation;
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/graphs/iris_tracking/calculators/iris_to_render_data_calculator.proto b/mediapipe/graphs/iris_tracking/calculators/iris_to_render_data_calculator.proto
new file mode 100644
index 0000000..e0fc677
--- /dev/null
+++ b/mediapipe/graphs/iris_tracking/calculators/iris_to_render_data_calculator.proto
@@ -0,0 +1,62 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
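
Both calculators above rely on the same pinhole-camera geometry: the human iris is assumed to have a roughly fixed physical diameter (kIrisSizeInMM), so its apparent diameter in pixels, together with the focal length in pixels, determines the distance to the camera. A minimal self-contained sketch of that math, plus the per-frame smoothing used by IrisToDepthCalculator (helper names here are illustrative, not MediaPipe API):

#include <cmath>

constexpr float kIrisSizeInMM = 11.8f;      // assumed physical iris diameter
constexpr float kDepthWeightUpdate = 0.1f;  // EMA weight for new samples

// Euclidean distance between two pixel-space points.
inline float Dist(float x0, float y0, float x1, float y1) {
  return std::sqrt((x0 - x1) * (x0 - x1) + (y0 - y1) * (y0 - y1));
}

// Pinhole estimate, mirroring CalculateDepth above: the ray from the camera
// through the iris center has pixel length sqrt(f^2 + y^2), where y is the
// iris center's offset from the image center; scaling by the ratio of the
// physical iris diameter to its pixel diameter converts that length to mm.
float EstimateDepthMM(float focal_length_px, float iris_diameter_px,
                      float iris_cx_px, float iris_cy_px, float img_w,
                      float img_h) {
  const float y = Dist(img_w / 2.f, img_h / 2.f, iris_cx_px, iris_cy_px);
  const float x = std::sqrt(focal_length_px * focal_length_px + y * y);
  return kIrisSizeInMM * x / iris_diameter_px;
}

// Smoothing as in IrisToDepthCalculator::Process: the first valid sample is
// taken as-is, later samples are blended in with weight kDepthWeightUpdate.
float SmoothDepth(float smoothed_mm, float new_mm) {
  if (smoothed_mm < 0 || std::isinf(smoothed_mm)) return new_mm;
  return smoothed_mm * (1 - kDepthWeightUpdate) + new_mm * kDepthWeightUpdate;
}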
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+import "mediapipe/util/color.proto";
+
+message IrisToRenderDataCalculatorOptions {
+  extend CalculatorOptions {
+    optional IrisToRenderDataCalculatorOptions ext = 289530040;
+  }
+
+  // Color of the oval.
+  optional Color oval_color = 1;
+  // Color of the landmarks.
+  optional Color landmark_color = 9;
+
+  // Thickness of the drawing of landmarks and iris oval.
+  optional double oval_thickness = 2 [default = 1.0];
+  optional double landmark_thickness = 10 [default = 1.0];
+
+  // The font height in absolute pixels.
+  optional int32 font_height_px = 3 [default = 50];
+
+  // The offset of the starting text in horizontal direction in absolute pixels.
+  optional int32 horizontal_offset_px = 7 [default = 0];
+  // The offset of the starting text in vertical direction in absolute pixels.
+  optional int32 vertical_offset_px = 8 [default = 0];
+
+  // Specifies the font for the text. Font must be one of the following from
+  // OpenCV:
+  // cv::FONT_HERSHEY_SIMPLEX (0)
+  // cv::FONT_HERSHEY_PLAIN (1)
+  // cv::FONT_HERSHEY_DUPLEX (2)
+  // cv::FONT_HERSHEY_COMPLEX (3)
+  // cv::FONT_HERSHEY_TRIPLEX (4)
+  // cv::FONT_HERSHEY_COMPLEX_SMALL (5)
+  // cv::FONT_HERSHEY_SCRIPT_SIMPLEX (6)
+  // cv::FONT_HERSHEY_SCRIPT_COMPLEX (7)
+  optional int32 font_face = 5 [default = 0];
+
+  // Label location.
+  enum Location {
+    TOP_LEFT = 0;
+    BOTTOM_LEFT = 1;
+  }
+  optional Location location = 6 [default = TOP_LEFT];
+}
diff --git a/mediapipe/graphs/iris_tracking/calculators/update_face_landmarks_calculator.cc b/mediapipe/graphs/iris_tracking/calculators/update_face_landmarks_calculator.cc
new file mode 100644
index 0000000..de9549a
--- /dev/null
+++ b/mediapipe/graphs/iris_tracking/calculators/update_face_landmarks_calculator.cc
@@ -0,0 +1,268 @@
+// Copyright 2019 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <memory>
+#include <vector>
+
+#include "absl/strings/str_cat.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+
+namespace mediapipe {
+
+namespace {
+
+constexpr char kFaceLandmarksTag[] = "FACE_LANDMARKS";
+constexpr char kNewEyeLandmarksTag[] = "NEW_EYE_LANDMARKS";
+constexpr char kUpdatedFaceLandmarksTag[] = "UPDATED_FACE_LANDMARKS";
+
+constexpr int kNumFaceLandmarks = 468;
+// 71 landmarks for left eye and 71 landmarks for right eye.
+constexpr int kNumEyeLandmarks = 142; + +constexpr int kEyeLandmarkIndicesInFaceLandmarks[] = { + // Left eye + // eye lower contour + 33, + 7, + 163, + 144, + 145, + 153, + 154, + 155, + 133, + // eye upper contour (excluding corners) + 246, + 161, + 160, + 159, + 158, + 157, + 173, + // halo x2 lower contour + 130, + 25, + 110, + 24, + 23, + 22, + 26, + 112, + 243, + // halo x2 upper contour (excluding corners) + 247, + 30, + 29, + 27, + 28, + 56, + 190, + // halo x3 lower contour + 226, + 31, + 228, + 229, + 230, + 231, + 232, + 233, + 244, + // halo x3 upper contour (excluding corners) + 113, + 225, + 224, + 223, + 222, + 221, + 189, + // halo x4 upper contour (no lower because of mesh structure) + // or eyebrow inner contour + 35, + 124, + 46, + 53, + 52, + 65, + // halo x5 lower contour + 143, + 111, + 117, + 118, + 119, + 120, + 121, + 128, + 245, + // halo x5 upper contour (excluding corners) + // or eyebrow outer contour + 156, + 70, + 63, + 105, + 66, + 107, + 55, + 193, + + // Right eye + // eye lower contour + 263, + 249, + 390, + 373, + 374, + 380, + 381, + 382, + 362, + // eye upper contour (excluding corners) + 466, + 388, + 387, + 386, + 385, + 384, + 398, + // halo x2 lower contour + 359, + 255, + 339, + 254, + 253, + 252, + 256, + 341, + 463, + // halo x2 upper contour (excluding corners) + 467, + 260, + 259, + 257, + 258, + 286, + 414, + // halo x3 lower contour + 446, + 261, + 448, + 449, + 450, + 451, + 452, + 453, + 464, + // halo x3 upper contour (excluding corners) + 342, + 445, + 444, + 443, + 442, + 441, + 413, + // halo x4 upper contour (no lower because of mesh structure) + // or eyebrow inner contour + 265, + 353, + 276, + 283, + 282, + 295, + // halo x5 lower contour + 372, + 340, + 346, + 347, + 348, + 349, + 350, + 357, + 465, + // halo x5 upper contour (excluding corners) + // or eyebrow outer contour + 383, + 300, + 293, + 334, + 296, + 336, + 285, + 417, +}; + +} // namespace + +// Update face landmarks with new (e.g., refined) values. Currently only updates +// landmarks around the eyes. 
+//
+// Usage example:
+// node {
+//   calculator: "UpdateFaceLandmarksCalculator"
+//   input_stream: "NEW_EYE_LANDMARKS:new_eye_landmarks"
+//   input_stream: "FACE_LANDMARKS:face_landmarks"
+//   output_stream: "UPDATED_FACE_LANDMARKS:refine_face_landmarks"
+// }
+//
+class UpdateFaceLandmarksCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc) {
+    cc->Inputs().Tag(kFaceLandmarksTag).Set<NormalizedLandmarkList>();
+    cc->Inputs().Tag(kNewEyeLandmarksTag).Set<NormalizedLandmarkList>();
+
+    cc->Outputs().Tag(kUpdatedFaceLandmarksTag).Set<NormalizedLandmarkList>();
+
+    return absl::OkStatus();
+  }
+  absl::Status Open(CalculatorContext* cc) override {
+    cc->SetOffset(TimestampDiff(0));
+    return absl::OkStatus();
+  }
+
+  absl::Status Process(CalculatorContext* cc) override;
+};
+REGISTER_CALCULATOR(UpdateFaceLandmarksCalculator);
+
+absl::Status UpdateFaceLandmarksCalculator::Process(CalculatorContext* cc) {
+  if (cc->Inputs().Tag(kFaceLandmarksTag).IsEmpty() ||
+      cc->Inputs().Tag(kNewEyeLandmarksTag).IsEmpty()) {
+    return absl::OkStatus();
+  }
+  const auto& face_landmarks =
+      cc->Inputs().Tag(kFaceLandmarksTag).Get<NormalizedLandmarkList>();
+  const auto& new_eye_landmarks =
+      cc->Inputs().Tag(kNewEyeLandmarksTag).Get<NormalizedLandmarkList>();
+
+  RET_CHECK_EQ(face_landmarks.landmark_size(), kNumFaceLandmarks)
+      << "Wrong number of face landmarks";
+  RET_CHECK_EQ(new_eye_landmarks.landmark_size(), kNumEyeLandmarks)
+      << "Wrong number of eye landmarks";
+
+  auto refined_face_landmarks =
+      absl::make_unique<NormalizedLandmarkList>(face_landmarks);
+  for (int i = 0; i < kNumEyeLandmarks; ++i) {
+    const auto& refined_ld = new_eye_landmarks.landmark(i);
+    const int id = kEyeLandmarkIndicesInFaceLandmarks[i];
+    refined_face_landmarks->mutable_landmark(id)->set_x(refined_ld.x());
+    refined_face_landmarks->mutable_landmark(id)->set_y(refined_ld.y());
+    refined_face_landmarks->mutable_landmark(id)->set_z(refined_ld.z());
+    refined_face_landmarks->mutable_landmark(id)->set_visibility(
+        refined_ld.visibility());
+  }
+  cc->Outputs()
+      .Tag(kUpdatedFaceLandmarksTag)
+      .Add(refined_face_landmarks.release(), cc->InputTimestamp());
+
+  return absl::OkStatus();
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/graphs/iris_tracking/iris_depth_cpu.pbtxt b/mediapipe/graphs/iris_tracking/iris_depth_cpu.pbtxt
new file mode 100644
index 0000000..3597e7f
--- /dev/null
+++ b/mediapipe/graphs/iris_tracking/iris_depth_cpu.pbtxt
@@ -0,0 +1,159 @@
+# MediaPipe graph that performs iris distance computation on desktop with
+# TensorFlow Lite on CPU.
+# Used in the example in
+# mediapipe/examples/desktop/iris_tracking:iris_depth_from_image_desktop.
+
+# Raw image bytes. (std::string)
+input_stream: "input_image_bytes"
+
+# Image with all the detections rendered. (ImageFrame)
+output_stream: "output_image"
+# Estimated depth in mm from the camera to the left iris of the face (if any) in
+# the image. (float)
+output_stream: "left_iris_depth_mm"
+# Estimated depth in mm from the camera to the right iris of the face (if any)
+# in the image. (float)
+output_stream: "right_iris_depth_mm"
+
+# Computes the focal length in pixels based on EXIF information stored in the
+# image file. The output is an ImageFileProperties object containing relevant
+# image EXIF information along with focal length in pixels.
+node {
+  calculator: "ImageFilePropertiesCalculator"
+  input_stream: "input_image_bytes"
+  output_side_packet: "image_file_properties"
+}
+
+# Converts a raw string with encoded image bytes into an ImageFrame object
+# via OpenCV so that it can be processed by downstream calculators.
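
The ImageFilePropertiesCalculator above turns EXIF metadata into a focal length in pixels. The underlying conversion is similar-triangles arithmetic; a hedged sketch (the exact EXIF fields the calculator consumes may differ):

#include <cassert>

// Illustrative only: focal length in pixels relates to focal length in mm
// through the sensor width, since f_px / image_width_px == f_mm / sensor_mm.
float FocalLengthPixels(float focal_length_mm, float sensor_width_mm,
                        int image_width_px) {
  assert(sensor_width_mm > 0);
  return focal_length_mm * image_width_px / sensor_width_mm;
}

// E.g. a 4.2 mm lens over a 6.17 mm-wide sensor shooting 4032 px-wide images
// gives roughly 4.2 * 4032 / 6.17 ~= 2745 px of focal length.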
+node { + calculator: "OpenCvEncodedImageToImageFrameCalculator" + input_stream: "input_image_bytes" + output_stream: "input_image" +} + +# Defines how many faces to detect. Iris tracking currently only handles one +# face (left and right eye), and therefore this should always be set to 1. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:0:num_faces" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { int_value: 1 } + } + } +} + +# Detects faces and corresponding landmarks. +node { + calculator: "FaceLandmarkFrontCpu" + input_stream: "IMAGE:input_image" + input_side_packet: "NUM_FACES:num_faces" + output_stream: "LANDMARKS:multi_face_landmarks" + output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" + output_stream: "DETECTIONS:face_detections" + output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" +} + +# Gets the very first and only face from "multi_face_landmarks" vector. +node { + calculator: "SplitNormalizedLandmarkListVectorCalculator" + input_stream: "multi_face_landmarks" + output_stream: "face_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 1 } + element_only: true + } + } +} + +# Gets the very first and only face rect from "face_rects_from_landmarks" +# vector. +node { + calculator: "SplitNormalizedRectVectorCalculator" + input_stream: "face_rects_from_landmarks" + output_stream: "face_rect" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 1 } + element_only: true + } + } +} + +# Gets two landmarks which define left eye boundary. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "face_landmarks" + output_stream: "left_eye_boundary_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 33 end: 34 } + ranges: { begin: 133 end: 134 } + combine_outputs: true + } + } +} + +# Gets two landmarks which define right eye boundary. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "face_landmarks" + output_stream: "right_eye_boundary_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 362 end: 363 } + ranges: { begin: 263 end: 264 } + combine_outputs: true + } + } +} + +# Detects iris landmarks, eye contour landmarks, and corresponding rect (ROI). 
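
The two SplitNormalizedLandmarkListCalculator nodes above pull single landmarks out of the 468-point face mesh by index range: 33 and 133 are the left-eye corners, 362 and 263 the right-eye corners, and combine_outputs merges the one-element ranges into a single two-landmark list. A sketch of the equivalent selection (names are illustrative):

#include <initializer_list>

#include "mediapipe/framework/formats/landmark.pb.h"

// Picks individual landmarks by index and combines them into one list, like
// ranges {33,34} + {133,134} with combine_outputs: true in the nodes above.
mediapipe::NormalizedLandmarkList SelectLandmarks(
    const mediapipe::NormalizedLandmarkList& face,
    std::initializer_list<int> indices) {
  mediapipe::NormalizedLandmarkList out;
  for (int index : indices) {
    *out.add_landmark() = face.landmark(index);
  }
  return out;
}

// Usage sketch:
//   auto left_eye_boundary = SelectLandmarks(face_landmarks, {33, 133});
//   auto right_eye_boundary = SelectLandmarks(face_landmarks, {362, 263});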
+node {
+  calculator: "IrisLandmarkLeftAndRightCpu"
+  input_stream: "IMAGE:input_image"
+  input_stream: "LEFT_EYE_BOUNDARY_LANDMARKS:left_eye_boundary_landmarks"
+  input_stream: "RIGHT_EYE_BOUNDARY_LANDMARKS:right_eye_boundary_landmarks"
+  output_stream: "LEFT_EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks"
+  output_stream: "LEFT_EYE_IRIS_LANDMARKS:left_iris_landmarks"
+  output_stream: "LEFT_EYE_ROI:left_eye_rect_from_landmarks"
+  output_stream: "RIGHT_EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks"
+  output_stream: "RIGHT_EYE_IRIS_LANDMARKS:right_iris_landmarks"
+  output_stream: "RIGHT_EYE_ROI:right_eye_rect_from_landmarks"
+}
+
+node {
+  calculator: "ConcatenateNormalizedLandmarkListCalculator"
+  input_stream: "left_eye_contour_landmarks"
+  input_stream: "right_eye_contour_landmarks"
+  output_stream: "refined_eye_landmarks"
+}
+
+node {
+  calculator: "UpdateFaceLandmarksCalculator"
+  input_stream: "NEW_EYE_LANDMARKS:refined_eye_landmarks"
+  input_stream: "FACE_LANDMARKS:face_landmarks"
+  output_stream: "UPDATED_FACE_LANDMARKS:updated_face_landmarks"
+}
+
+# Renders annotations and overlays them on top of the input images.
+node {
+  calculator: "IrisAndDepthRendererCpu"
+  input_stream: "IMAGE:input_image"
+  input_stream: "FACE_LANDMARKS:updated_face_landmarks"
+  input_stream: "EYE_LANDMARKS_LEFT:left_eye_contour_landmarks"
+  input_stream: "EYE_LANDMARKS_RIGHT:right_eye_contour_landmarks"
+  input_stream: "IRIS_LANDMARKS_LEFT:left_iris_landmarks"
+  input_stream: "IRIS_LANDMARKS_RIGHT:right_iris_landmarks"
+  input_stream: "NORM_RECT:face_rect"
+  input_stream: "LEFT_EYE_RECT:left_eye_rect_from_landmarks"
+  input_stream: "RIGHT_EYE_RECT:right_eye_rect_from_landmarks"
+  input_stream: "DETECTIONS:face_detections"
+  input_side_packet: "IMAGE_FILE_PROPERTIES:image_file_properties"
+  output_stream: "IRIS_LANDMARKS:iris_landmarks"
+  output_stream: "IMAGE:output_image"
+  output_stream: "LEFT_IRIS_DEPTH_MM:left_iris_depth_mm"
+  output_stream: "RIGHT_IRIS_DEPTH_MM:right_iris_depth_mm"
+}
diff --git a/mediapipe/graphs/iris_tracking/iris_tracking_cpu.pbtxt b/mediapipe/graphs/iris_tracking/iris_tracking_cpu.pbtxt
new file mode 100644
index 0000000..c0a3857
--- /dev/null
+++ b/mediapipe/graphs/iris_tracking/iris_tracking_cpu.pbtxt
@@ -0,0 +1,142 @@
+# MediaPipe graph that performs iris tracking on desktop with TensorFlow Lite
+# on CPU.
+# Used in the example in
+# mediapipe/examples/desktop/iris_tracking:iris_tracking_cpu.
+
+# CPU image. (ImageFrame)
+input_stream: "input_video"
+
+# CPU image. (ImageFrame)
+output_stream: "output_video"
+# Face landmarks with iris. (NormalizedLandmarkList)
+output_stream: "face_landmarks_with_iris"
+
+# Defines how many faces to detect. Iris tracking currently only handles one
+# face (left and right eye), and therefore this should always be set to 1.
+node {
+  calculator: "ConstantSidePacketCalculator"
+  output_side_packet: "PACKET:0:num_faces"
+  node_options: {
+    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
+      packet { int_value: 1 }
+    }
+  }
+}
+
+# Detects faces and corresponding landmarks.
+node {
+  calculator: "FaceLandmarkFrontCpu"
+  input_stream: "IMAGE:input_video"
+  input_side_packet: "NUM_FACES:num_faces"
+  output_stream: "LANDMARKS:multi_face_landmarks"
+  output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks"
+  output_stream: "DETECTIONS:face_detections"
+  output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections"
+}
+
+# Gets the very first and only face from "multi_face_landmarks" vector.
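
The comment above describes the SplitNormalizedLandmarkListVectorCalculator node that follows: with ranges {begin: 0 end: 1} and element_only: true it unwraps the per-face vector into a single landmark list rather than a one-element vector. A sketch of that semantics (illustrative, not the calculator's actual implementation):

#include <vector>

#include "mediapipe/framework/formats/landmark.pb.h"

// element_only: true emits the element itself instead of a length-one
// vector; the graph pins num_faces to 1 upstream, so at most one entry
// is ever present.
mediapipe::NormalizedLandmarkList FirstFace(
    const std::vector<mediapipe::NormalizedLandmarkList>& multi_face) {
  return multi_face.front();  // assumes at least one detected face
}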
+node { + calculator: "SplitNormalizedLandmarkListVectorCalculator" + input_stream: "multi_face_landmarks" + output_stream: "face_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 1 } + element_only: true + } + } +} + +# Gets the very first and only face rect from "face_rects_from_landmarks" +# vector. +node { + calculator: "SplitNormalizedRectVectorCalculator" + input_stream: "face_rects_from_landmarks" + output_stream: "face_rect" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 1 } + element_only: true + } + } +} + +# Gets two landmarks which define left eye boundary. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "face_landmarks" + output_stream: "left_eye_boundary_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 33 end: 34 } + ranges: { begin: 133 end: 134 } + combine_outputs: true + } + } +} + +# Gets two landmarks which define right eye boundary. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "face_landmarks" + output_stream: "right_eye_boundary_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 362 end: 363 } + ranges: { begin: 263 end: 264 } + combine_outputs: true + } + } +} + +# Detects iris landmarks, eye contour landmarks, and corresponding rect (ROI). +node { + calculator: "IrisLandmarkLeftAndRightCpu" + input_stream: "IMAGE:input_video" + input_stream: "LEFT_EYE_BOUNDARY_LANDMARKS:left_eye_boundary_landmarks" + input_stream: "RIGHT_EYE_BOUNDARY_LANDMARKS:right_eye_boundary_landmarks" + output_stream: "LEFT_EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks" + output_stream: "LEFT_EYE_IRIS_LANDMARKS:left_iris_landmarks" + output_stream: "LEFT_EYE_ROI:left_eye_rect_from_landmarks" + output_stream: "RIGHT_EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks" + output_stream: "RIGHT_EYE_IRIS_LANDMARKS:right_iris_landmarks" + output_stream: "RIGHT_EYE_ROI:right_eye_rect_from_landmarks" +} + +node { + calculator: "ConcatenateNormalizedLandmarkListCalculator" + input_stream: "left_eye_contour_landmarks" + input_stream: "right_eye_contour_landmarks" + output_stream: "refined_eye_landmarks" +} + +node { + calculator: "UpdateFaceLandmarksCalculator" + input_stream: "NEW_EYE_LANDMARKS:refined_eye_landmarks" + input_stream: "FACE_LANDMARKS:face_landmarks" + output_stream: "UPDATED_FACE_LANDMARKS:updated_face_landmarks" +} + +# Renders annotations and overlays them on top of the input images. 
+node {
+  calculator: "IrisRendererCpu"
+  input_stream: "IMAGE:input_video"
+  input_stream: "FACE_LANDMARKS:updated_face_landmarks"
+  input_stream: "EYE_LANDMARKS_LEFT:left_eye_contour_landmarks"
+  input_stream: "EYE_LANDMARKS_RIGHT:right_eye_contour_landmarks"
+  input_stream: "IRIS_LANDMARKS_LEFT:left_iris_landmarks"
+  input_stream: "IRIS_LANDMARKS_RIGHT:right_iris_landmarks"
+  input_stream: "NORM_RECT:face_rect"
+  input_stream: "LEFT_EYE_RECT:left_eye_rect_from_landmarks"
+  input_stream: "RIGHT_EYE_RECT:right_eye_rect_from_landmarks"
+  input_stream: "DETECTIONS:face_detections"
+  output_stream: "IRIS_LANDMARKS:iris_landmarks"
+  output_stream: "IMAGE:output_video"
+}
+
+node {
+  calculator: "ConcatenateNormalizedLandmarkListCalculator"
+  input_stream: "updated_face_landmarks"
+  input_stream: "iris_landmarks"
+  output_stream: "face_landmarks_with_iris"
+}
diff --git a/mediapipe/graphs/iris_tracking/iris_tracking_cpu_video_input.pbtxt b/mediapipe/graphs/iris_tracking/iris_tracking_cpu_video_input.pbtxt
new file mode 100644
index 0000000..82229bd
--- /dev/null
+++ b/mediapipe/graphs/iris_tracking/iris_tracking_cpu_video_input.pbtxt
@@ -0,0 +1,153 @@
+# MediaPipe graph that performs iris tracking on desktop with TensorFlow Lite
+# on CPU.
+
+# max_queue_size limits the number of packets enqueued on any input stream
+# by throttling inputs to the graph. This makes the graph process only one
+# frame at a time.
+max_queue_size: 1
+
+# Decodes an input video file into images and a video header.
+node {
+  calculator: "OpenCvVideoDecoderCalculator"
+  input_side_packet: "INPUT_FILE_PATH:input_video_path"
+  output_stream: "VIDEO:input_video"
+  output_stream: "VIDEO_PRESTREAM:input_video_header"
+}
+
+# Defines how many faces to detect. Iris tracking currently only handles one
+# face (left and right eye), and therefore this should always be set to 1.
+node {
+  calculator: "ConstantSidePacketCalculator"
+  output_side_packet: "PACKET:0:num_faces"
+  node_options: {
+    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
+      packet { int_value: 1 }
+    }
+  }
+}
+
+# Detects faces and corresponding landmarks.
+node {
+  calculator: "FaceLandmarkFrontCpu"
+  input_stream: "IMAGE:input_video"
+  input_side_packet: "NUM_FACES:num_faces"
+  output_stream: "LANDMARKS:multi_face_landmarks"
+  output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks"
+  output_stream: "DETECTIONS:face_detections"
+  output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections"
+}
+
+# Gets the very first and only face from "multi_face_landmarks" vector.
+node {
+  calculator: "SplitNormalizedLandmarkListVectorCalculator"
+  input_stream: "multi_face_landmarks"
+  output_stream: "face_landmarks"
+  node_options: {
+    [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] {
+      ranges: { begin: 0 end: 1 }
+      element_only: true
+    }
+  }
+}
+
+# Gets the very first and only face rect from "face_rects_from_landmarks"
+# vector.
+node {
+  calculator: "SplitNormalizedRectVectorCalculator"
+  input_stream: "face_rects_from_landmarks"
+  output_stream: "face_rect"
+  node_options: {
+    [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] {
+      ranges: { begin: 0 end: 1 }
+      element_only: true
+    }
+  }
+}
+
+# Gets two landmarks which define left eye boundary.
+node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "face_landmarks" + output_stream: "left_eye_boundary_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 33 end: 34 } + ranges: { begin: 133 end: 134 } + combine_outputs: true + } + } +} + +# Gets two landmarks which define right eye boundary. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "face_landmarks" + output_stream: "right_eye_boundary_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 362 end: 363 } + ranges: { begin: 263 end: 264 } + combine_outputs: true + } + } +} + +# Detects iris landmarks, eye contour landmarks, and corresponding rect (ROI). +node { + calculator: "IrisLandmarkLeftAndRightCpu" + input_stream: "IMAGE:input_video" + input_stream: "LEFT_EYE_BOUNDARY_LANDMARKS:left_eye_boundary_landmarks" + input_stream: "RIGHT_EYE_BOUNDARY_LANDMARKS:right_eye_boundary_landmarks" + output_stream: "LEFT_EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks" + output_stream: "LEFT_EYE_IRIS_LANDMARKS:left_iris_landmarks" + output_stream: "LEFT_EYE_ROI:left_eye_rect_from_landmarks" + output_stream: "RIGHT_EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks" + output_stream: "RIGHT_EYE_IRIS_LANDMARKS:right_iris_landmarks" + output_stream: "RIGHT_EYE_ROI:right_eye_rect_from_landmarks" +} + +node { + calculator: "ConcatenateNormalizedLandmarkListCalculator" + input_stream: "left_eye_contour_landmarks" + input_stream: "right_eye_contour_landmarks" + output_stream: "refined_eye_landmarks" +} + +node { + calculator: "UpdateFaceLandmarksCalculator" + input_stream: "NEW_EYE_LANDMARKS:refined_eye_landmarks" + input_stream: "FACE_LANDMARKS:face_landmarks" + output_stream: "UPDATED_FACE_LANDMARKS:updated_face_landmarks" +} + +# Renders annotations and overlays them on top of the input images. +node { + calculator: "IrisRendererCpu" + input_stream: "IMAGE:input_video" + input_stream: "FACE_LANDMARKS:updated_face_landmarks" + input_stream: "EYE_LANDMARKS_LEFT:left_eye_contour_landmarks" + input_stream: "EYE_LANDMARKS_RIGHT:right_eye_contour_landmarks" + input_stream: "IRIS_LANDMARKS_LEFT:left_iris_landmarks" + input_stream: "IRIS_LANDMARKS_RIGHT:right_iris_landmarks" + input_stream: "NORM_RECT:face_rect" + input_stream: "LEFT_EYE_RECT:left_eye_rect_from_landmarks" + input_stream: "RIGHT_EYE_RECT:right_eye_rect_from_landmarks" + input_stream: "DETECTIONS:face_detections" + output_stream: "IRIS_LANDMARKS:iris_landmarks" + output_stream: "IMAGE:output_video" +} + +# Encodes the annotated images into a video file, adopting properties specified +# in the input video header, e.g., video framerate. +node { + calculator: "OpenCvVideoEncoderCalculator" + input_stream: "VIDEO:output_video" + input_stream: "VIDEO_PRESTREAM:input_video_header" + input_side_packet: "OUTPUT_FILE_PATH:output_video_path" + node_options: { + [type.googleapis.com/mediapipe.OpenCvVideoEncoderCalculatorOptions]: { + codec: "avc1" + video_format: "mp4" + } + } +} diff --git a/mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt b/mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt new file mode 100644 index 0000000..505a951 --- /dev/null +++ b/mediapipe/graphs/iris_tracking/iris_tracking_gpu.pbtxt @@ -0,0 +1,163 @@ +# MediaPipe graph that performs iris tracking with TensorFlow Lite on GPU. 
+# Used in the examples in
+# mediapipe/examples/android/src/java/com/mediapipe/apps/iristrackinggpu.
+
+# GPU buffer. (GpuBuffer)
+input_stream: "input_video"
+
+# GPU buffer. (GpuBuffer)
+output_stream: "output_video"
+# Face landmarks with iris. (NormalizedLandmarkList)
+output_stream: "face_landmarks_with_iris"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that come in while waiting are
+# dropped, limiting the number of in-flight images in most parts of the graph
+# to 1. This prevents the downstream nodes from queuing up incoming images and
+# data excessively, which leads to increased latency and memory usage, unwanted
+# in real-time mobile applications. It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Defines how many faces to detect. Iris tracking currently only handles one
+# face (left and right eye), and therefore this should always be set to 1.
+node {
+  calculator: "ConstantSidePacketCalculator"
+  output_side_packet: "PACKET:num_faces"
+  node_options: {
+    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
+      packet { int_value: 1 }
+    }
+  }
+}
+
+# Detects faces and corresponding landmarks.
+node {
+  calculator: "FaceLandmarkFrontGpu"
+  input_stream: "IMAGE:throttled_input_video"
+  input_side_packet: "NUM_FACES:num_faces"
+  output_stream: "LANDMARKS:multi_face_landmarks"
+  output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks"
+  output_stream: "DETECTIONS:face_detections"
+  output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections"
+}
+
+# Gets the very first and only face from "multi_face_landmarks" vector.
+node {
+  calculator: "SplitNormalizedLandmarkListVectorCalculator"
+  input_stream: "multi_face_landmarks"
+  output_stream: "face_landmarks"
+  node_options: {
+    [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] {
+      ranges: { begin: 0 end: 1 }
+      element_only: true
+    }
+  }
+}
+
+# Gets the very first and only face rect from "face_rects_from_landmarks"
+# vector.
+node {
+  calculator: "SplitNormalizedRectVectorCalculator"
+  input_stream: "face_rects_from_landmarks"
+  output_stream: "face_rect"
+  node_options: {
+    [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] {
+      ranges: { begin: 0 end: 1 }
+      element_only: true
+    }
+  }
+}
+
+# Gets two landmarks which define left eye boundary.
+node {
+  calculator: "SplitNormalizedLandmarkListCalculator"
+  input_stream: "face_landmarks"
+  output_stream: "left_eye_boundary_landmarks"
+  node_options: {
+    [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] {
+      ranges: { begin: 33 end: 34 }
+      ranges: { begin: 133 end: 134 }
+      combine_outputs: true
+    }
+  }
+}
+
+# Gets two landmarks which define right eye boundary.
+node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "face_landmarks" + output_stream: "right_eye_boundary_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 362 end: 363 } + ranges: { begin: 263 end: 264 } + combine_outputs: true + } + } +} + +# Detects iris landmarks, eye contour landmarks, and corresponding rect (ROI). +node { + calculator: "IrisLandmarkLeftAndRightGpu" + input_stream: "IMAGE:throttled_input_video" + input_stream: "LEFT_EYE_BOUNDARY_LANDMARKS:left_eye_boundary_landmarks" + input_stream: "RIGHT_EYE_BOUNDARY_LANDMARKS:right_eye_boundary_landmarks" + output_stream: "LEFT_EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks" + output_stream: "LEFT_EYE_IRIS_LANDMARKS:left_iris_landmarks" + output_stream: "LEFT_EYE_ROI:left_eye_rect_from_landmarks" + output_stream: "RIGHT_EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks" + output_stream: "RIGHT_EYE_IRIS_LANDMARKS:right_iris_landmarks" + output_stream: "RIGHT_EYE_ROI:right_eye_rect_from_landmarks" +} + +node { + calculator: "ConcatenateNormalizedLandmarkListCalculator" + input_stream: "left_eye_contour_landmarks" + input_stream: "right_eye_contour_landmarks" + output_stream: "refined_eye_landmarks" +} + +node { + calculator: "UpdateFaceLandmarksCalculator" + input_stream: "NEW_EYE_LANDMARKS:refined_eye_landmarks" + input_stream: "FACE_LANDMARKS:face_landmarks" + output_stream: "UPDATED_FACE_LANDMARKS:updated_face_landmarks" +} + +# Renders annotations and overlays them on top of the input images. +node { + calculator: "IrisAndDepthRendererGpu" + input_stream: "IMAGE:throttled_input_video" + input_stream: "FACE_LANDMARKS:updated_face_landmarks" + input_stream: "EYE_LANDMARKS_LEFT:left_eye_contour_landmarks" + input_stream: "EYE_LANDMARKS_RIGHT:right_eye_contour_landmarks" + input_stream: "IRIS_LANDMARKS_LEFT:left_iris_landmarks" + input_stream: "IRIS_LANDMARKS_RIGHT:right_iris_landmarks" + input_stream: "NORM_RECT:face_rect" + input_stream: "LEFT_EYE_RECT:left_eye_rect_from_landmarks" + input_stream: "RIGHT_EYE_RECT:right_eye_rect_from_landmarks" + input_stream: "DETECTIONS:face_detections" + input_side_packet: "FOCAL_LENGTH:focal_length_pixel" + output_stream: "IRIS_LANDMARKS:iris_landmarks" + output_stream: "IMAGE:output_video" +} + +node { + calculator: "ConcatenateNormalizedLandmarkListCalculator" + input_stream: "updated_face_landmarks" + input_stream: "iris_landmarks" + output_stream: "face_landmarks_with_iris" +} diff --git a/mediapipe/graphs/iris_tracking/subgraphs/BUILD b/mediapipe/graphs/iris_tracking/subgraphs/BUILD new file mode 100644 index 0000000..d37c550 --- /dev/null +++ b/mediapipe/graphs/iris_tracking/subgraphs/BUILD @@ -0,0 +1,67 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
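
The FlowLimiterCalculator node in iris_tracking_gpu.pbtxt above implements the throttling its comment describes via a FINISHED back edge. A toy model of that loop (illustrative only, not the MediaPipe implementation):

// At most one frame is in flight; frames arriving while busy are dropped,
// and the FINISHED back edge frees the limiter for the next frame.
class FlowLimiter {
 public:
  // Returns true if the frame may enter the graph; false means drop it.
  bool TryAdmit() {
    if (in_flight_) return false;
    in_flight_ = true;
    return true;
  }

  // Called when the corresponding output (the FINISHED stream) arrives.
  void Finished() { in_flight_ = false; }

 private:
  bool in_flight_ = false;
};

// Usage sketch, per incoming camera frame:
//   if (limiter.TryAdmit()) { RunGraphOn(frame); limiter.Finished(); }
//   else { /* drop frame to keep latency bounded */ }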
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "renderer_calculators", + deps = [ + "//mediapipe/calculators/core:concatenate_normalized_landmark_list_calculator", + "//mediapipe/calculators/core:concatenate_vector_calculator", + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_data_calculator", + "//mediapipe/graphs/face_mesh/calculators:face_landmarks_to_render_data_calculator", + "//mediapipe/graphs/iris_tracking/calculators:iris_to_render_data_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "iris_and_depth_renderer_gpu", + graph = "iris_and_depth_renderer_gpu.pbtxt", + register_as = "IrisAndDepthRendererGpu", + deps = [ + ":renderer_calculators", + "//mediapipe/graphs/iris_tracking/calculators:iris_to_depth_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "iris_renderer_cpu", + graph = "iris_renderer_cpu.pbtxt", + register_as = "IrisRendererCpu", + deps = [ + ":renderer_calculators", + ], +) + +mediapipe_simple_subgraph( + name = "iris_and_depth_renderer_cpu", + graph = "iris_and_depth_renderer_cpu.pbtxt", + register_as = "IrisAndDepthRendererCpu", + deps = [ + ":renderer_calculators", + "//mediapipe/graphs/iris_tracking/calculators:iris_to_depth_calculator", + ], +) diff --git a/mediapipe/graphs/iris_tracking/subgraphs/iris_and_depth_renderer_cpu.pbtxt b/mediapipe/graphs/iris_tracking/subgraphs/iris_and_depth_renderer_cpu.pbtxt new file mode 100644 index 0000000..fad6d4a --- /dev/null +++ b/mediapipe/graphs/iris_tracking/subgraphs/iris_and_depth_renderer_cpu.pbtxt @@ -0,0 +1,267 @@ +# MediaPipe iris tracking rendering subgraph. + +type: "IrisAndDepthRendererCpu" + +input_stream: "IMAGE:input_image" +input_stream: "DETECTIONS:detections" +input_stream: "FACE_LANDMARKS:face_landmarks" +input_stream: "EYE_LANDMARKS_LEFT:all_left_eye_contour_landmarks" +input_stream: "EYE_LANDMARKS_RIGHT:all_right_eye_contour_landmarks" +input_stream: "IRIS_LANDMARKS_LEFT:left_iris_landmarks" +input_stream: "IRIS_LANDMARKS_RIGHT:right_iris_landmarks" +input_stream: "NORM_RECT:rect" +input_stream: "LEFT_EYE_RECT:left_eye_rect_from_landmarks" +input_stream: "RIGHT_EYE_RECT:right_eye_rect_from_landmarks" +input_side_packet: "IMAGE_FILE_PROPERTIES:image_file_properties" +output_stream: "IRIS_LANDMARKS:iris_landmarks" +output_stream: "IMAGE:output_image" +output_stream: "LEFT_IRIS_DEPTH_MM:left_iris_depth_mm" +output_stream: "RIGHT_IRIS_DEPTH_MM:right_iris_depth_mm" + +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "all_left_eye_contour_landmarks" + output_stream: "left_eye_contour_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 15 } + } + } +} + +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "all_right_eye_contour_landmarks" + output_stream: "right_eye_contour_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 15 } + } + } +} + +# Concatenate iris landmarks from both eyes. 
+node { + calculator: "ConcatenateNormalizedLandmarkListCalculator" + input_stream: "left_iris_landmarks" + input_stream: "right_iris_landmarks" + output_stream: "iris_landmarks" +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "FaceLandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:face_landmarks" + output_stream: "RENDER_DATA:face_landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 150 g: 0 b: 0 } + connection_color { r: 0 g: 150 b: 0 } + thickness: 2 + visualize_landmark_depth: false + } + } +} + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:input_image" + output_stream: "SIZE:image_size" +} + +# Maps detection label IDs to the corresponding label text ("Face"). +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "detections" + output_stream: "labeled_detections" + node_options: { + [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] { + label: "Face" + } + } +} + +# Converts detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:labeled_detections" + output_stream: "RENDER_DATA:detection_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:left_eye_contour_landmarks" + output_stream: "RENDER_DATA:left_eye_contour_landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 4 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 12 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 0 + landmark_connections: 9 + landmark_connections: 8 + landmark_connections: 14 + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 255 g: 0 b: 0 } + visualize_landmark_depth: false + thickness: 1.0 + } + } +} + +# Converts landmarks to drawing primitives for annotation overlay. 
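
The landmark_connections values above (and in the right-eye node that follows) are read in consecutive pairs: {0, 1, 1, 2, ...} means draw segment 0-1, then 1-2, and so on, with 0-9 and 8-14 closing the eyelid loop. A sketch of the pair decoding:

#include <utility>
#include <vector>

// Decodes a flat landmark_connections list into (from, to) index pairs;
// e.g. {0, 1, 1, 2} -> edges 0-1 and 1-2.
std::vector<std::pair<int, int>> DecodeConnections(
    const std::vector<int>& flat) {
  std::vector<std::pair<int, int>> edges;
  for (size_t i = 0; i + 1 < flat.size(); i += 2) {
    edges.emplace_back(flat[i], flat[i + 1]);
  }
  return edges;
}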
+node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:right_eye_contour_landmarks" + output_stream: "RENDER_DATA:right_eye_contour_landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 4 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 12 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 0 + landmark_connections: 9 + landmark_connections: 8 + landmark_connections: 14 + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 255 g: 0 b: 0 } + visualize_landmark_depth: false + thickness: 1.0 + } + } +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:rect" + output_stream: "RENDER_DATA:rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:right_eye_rect_from_landmarks" + output_stream: "RENDER_DATA:right_eye_rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:left_eye_rect_from_landmarks" + output_stream: "RENDER_DATA:left_eye_rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "IrisToDepthCalculator" + input_stream: "IRIS:iris_landmarks" + input_stream: "IMAGE_SIZE:image_size" + input_side_packet: "IMAGE_FILE_PROPERTIES:image_file_properties" + output_stream: "LEFT_IRIS_DEPTH_MM:left_iris_depth_mm" + output_stream: "RIGHT_IRIS_DEPTH_MM:right_iris_depth_mm" +} + +node { + calculator: "IrisToRenderDataCalculator" + input_stream: "IRIS:iris_landmarks" + input_stream: "IMAGE_SIZE:image_size" + input_stream: "LEFT_IRIS_DEPTH_MM:left_iris_depth_mm" + input_stream: "RIGHT_IRIS_DEPTH_MM:right_iris_depth_mm" + output_stream: "RENDER_DATA:iris_render_data" + node_options: { + [type.googleapis.com/mediapipe.IrisToRenderDataCalculatorOptions] { + oval_color { r: 0 g: 0 b: 255 } + landmark_color { r: 0 g: 255 b: 0 } + oval_thickness: 2.0 + landmark_thickness: 1.0 + font_height_px: 50 + horizontal_offset_px: 200 + vertical_offset_px: 200 + location: TOP_LEFT + } + } +} + +# Draws annotations and overlays them on top of the input images. 
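+# (AnnotationOverlayCalculator treats the tagged IMAGE input as the background
+# and every untagged input as a RenderData stream; streams are drawn in the
+# order they are listed, so the iris annotations are painted last, on top.)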
+node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_image" + input_stream: "detection_render_data" + input_stream: "face_landmarks_render_data" + input_stream: "right_eye_contour_landmarks_render_data" + input_stream: "left_eye_contour_landmarks_render_data" + input_stream: "iris_render_data" + output_stream: "IMAGE:output_image" +} diff --git a/mediapipe/graphs/iris_tracking/subgraphs/iris_and_depth_renderer_gpu.pbtxt b/mediapipe/graphs/iris_tracking/subgraphs/iris_and_depth_renderer_gpu.pbtxt new file mode 100644 index 0000000..ba043d3 --- /dev/null +++ b/mediapipe/graphs/iris_tracking/subgraphs/iris_and_depth_renderer_gpu.pbtxt @@ -0,0 +1,270 @@ +# MediaPipe iris tracking rendering subgraph. + +type: "IrisAndDepthRendererGpu" + +input_stream: "IMAGE:input_image" +input_stream: "DETECTIONS:detections" +input_stream: "FACE_LANDMARKS:face_landmarks" +input_stream: "EYE_LANDMARKS_LEFT:all_left_eye_contour_landmarks" +input_stream: "EYE_LANDMARKS_RIGHT:all_right_eye_contour_landmarks" +input_stream: "IRIS_LANDMARKS_LEFT:left_iris_landmarks" +input_stream: "IRIS_LANDMARKS_RIGHT:right_iris_landmarks" +input_stream: "NORM_RECT:rect" +input_stream: "LEFT_EYE_RECT:left_eye_rect_from_landmarks" +input_stream: "RIGHT_EYE_RECT:right_eye_rect_from_landmarks" +input_side_packet: "FOCAL_LENGTH:focal_length_pixel" +output_stream: "IRIS_LANDMARKS:iris_landmarks" +output_stream: "IMAGE:output_image" + +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "all_left_eye_contour_landmarks" + output_stream: "left_eye_contour_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 15 } + } + } +} + +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "all_right_eye_contour_landmarks" + output_stream: "right_eye_contour_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 15 } + } + } +} + +# Concatenate iris landmarks from both eyes. +node { + calculator: "ConcatenateNormalizedLandmarkListCalculator" + input_stream: "left_iris_landmarks" + input_stream: "right_iris_landmarks" + output_stream: "iris_landmarks" +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "FaceLandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:face_landmarks" + output_stream: "RENDER_DATA:face_landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 150 g: 0 b: 0 } + connection_color { r: 0 g: 150 b: 0 } + thickness: 2 + visualize_landmark_depth: false + } + } +} + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:input_image" + output_stream: "SIZE:image_size" +} + +# Maps detection label IDs to the corresponding label text ("Face"). +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "detections" + output_stream: "labeled_detections" + node_options: { + [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] { + label: "Face" + } + } +} + +# Converts detections to drawing primitives for annotation overlay. 
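+# (Although this subgraph operates on GPU images, the render-data nodes below
+# are ordinary CPU calculators; only ImagePropertiesCalculator and the final
+# AnnotationOverlayCalculator use the IMAGE_GPU tags.)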
+node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:labeled_detections" + output_stream: "RENDER_DATA:detection_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:left_eye_contour_landmarks" + output_stream: "RENDER_DATA:left_eye_contour_landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 4 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 12 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 0 + landmark_connections: 9 + landmark_connections: 8 + landmark_connections: 14 + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 255 g: 0 b: 0 } + visualize_landmark_depth: false + thickness: 2.0 + } + } +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:right_eye_contour_landmarks" + output_stream: "RENDER_DATA:right_eye_contour_landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 4 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 12 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 0 + landmark_connections: 9 + landmark_connections: 8 + landmark_connections: 14 + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 255 g: 0 b: 0 } + visualize_landmark_depth: false + thickness: 2.0 + } + } +} + +# Converts normalized rects to drawing primitives for annotation overlay. 
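+# (As wired here, rect_render_data and the two eye-rect render-data streams
+# produced below are never fed into the AnnotationOverlayCalculator at the end
+# of this subgraph, so those rectangles are computed but not drawn.)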
+node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:rect" + output_stream: "RENDER_DATA:rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:right_eye_rect_from_landmarks" + output_stream: "RENDER_DATA:right_eye_rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:left_eye_rect_from_landmarks" + output_stream: "RENDER_DATA:left_eye_rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "IrisToDepthCalculator" + input_stream: "IRIS:iris_landmarks" + input_stream: "IMAGE_SIZE:image_size" + input_side_packet: "FOCAL_LENGTH:focal_length_pixel" + output_stream: "LEFT_IRIS_DEPTH_MM:left_iris_depth_mm" + output_stream: "RIGHT_IRIS_DEPTH_MM:right_iris_depth_mm" +} + +node { + calculator: "IrisToRenderDataCalculator" + input_stream: "IRIS:iris_landmarks" + input_stream: "IMAGE_SIZE:image_size" + input_stream: "LEFT_IRIS_DEPTH_MM:left_iris_depth_mm" + input_stream: "RIGHT_IRIS_DEPTH_MM:right_iris_depth_mm" + output_stream: "RENDER_DATA:iris_render_data" + node_options: { + [type.googleapis.com/mediapipe.IrisToRenderDataCalculatorOptions] { + oval_color { r: 0 g: 0 b: 255 } + landmark_color { r: 0 g: 255 b: 0 } + oval_thickness: 4.0 + landmark_thickness: 2.0 + font_height_px: 50 + horizontal_offset_px: 200 + vertical_offset_px: 200 + location: TOP_LEFT + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE_GPU:input_image" + input_stream: "detection_render_data" + input_stream: "face_landmarks_render_data" + input_stream: "right_eye_contour_landmarks_render_data" + input_stream: "left_eye_contour_landmarks_render_data" + input_stream: "iris_render_data" + output_stream: "IMAGE_GPU:output_image" + node_options: { + [type.googleapis.com/mediapipe.AnnotationOverlayCalculatorOptions] { + gpu_scale_factor: 0.5 + } + } +} diff --git a/mediapipe/graphs/iris_tracking/subgraphs/iris_renderer_cpu.pbtxt b/mediapipe/graphs/iris_tracking/subgraphs/iris_renderer_cpu.pbtxt new file mode 100644 index 0000000..81a3c90 --- /dev/null +++ b/mediapipe/graphs/iris_tracking/subgraphs/iris_renderer_cpu.pbtxt @@ -0,0 +1,254 @@ +# MediaPipe iris tracking rendering subgraph. 
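+# (Unlike the two depth-aware renderers above, this variant takes no focal
+# length or image-properties side packet and has no IrisToDepthCalculator; it
+# only draws the annotations and forwards the combined iris landmarks.)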
+ +type: "IrisRendererCpu" + +input_stream: "IMAGE:input_image" +input_stream: "DETECTIONS:detections" +input_stream: "FACE_LANDMARKS:face_landmarks" +input_stream: "EYE_LANDMARKS_LEFT:all_left_eye_contour_landmarks" +input_stream: "EYE_LANDMARKS_RIGHT:all_right_eye_contour_landmarks" +input_stream: "IRIS_LANDMARKS_LEFT:left_iris_landmarks" +input_stream: "IRIS_LANDMARKS_RIGHT:right_iris_landmarks" +input_stream: "NORM_RECT:rect" +input_stream: "LEFT_EYE_RECT:left_eye_rect_from_landmarks" +input_stream: "RIGHT_EYE_RECT:right_eye_rect_from_landmarks" +output_stream: "IRIS_LANDMARKS:iris_landmarks" +output_stream: "IMAGE:output_image" + +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "all_left_eye_contour_landmarks" + output_stream: "left_eye_contour_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 15 } + } + } +} + +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "all_right_eye_contour_landmarks" + output_stream: "right_eye_contour_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 15 } + } + } +} + +# Concatenate iris landmarks from both eyes. +node { + calculator: "ConcatenateNormalizedLandmarkListCalculator" + input_stream: "left_iris_landmarks" + input_stream: "right_iris_landmarks" + output_stream: "iris_landmarks" +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "FaceLandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:face_landmarks" + output_stream: "RENDER_DATA:face_landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 150 g: 0 b: 0 } + connection_color { r: 0 g: 150 b: 0 } + thickness: 2 + visualize_landmark_depth: false + } + } +} + + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:input_image" + output_stream: "SIZE:image_size" +} + +# Maps detection label IDs to the corresponding label text ("Face"). +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "detections" + output_stream: "labeled_detections" + node_options: { + [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] { + label: "Face" + } + } +} + +# Converts detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:labeled_detections" + output_stream: "RENDER_DATA:detection_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +# Converts landmarks to drawing primitives for annotation overlay. 
+node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:left_eye_contour_landmarks" + output_stream: "RENDER_DATA:left_eye_contour_landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 4 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 12 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 0 + landmark_connections: 9 + landmark_connections: 8 + landmark_connections: 14 + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 255 g: 0 b: 0 } + visualize_landmark_depth: false + thickness: 1.0 + } + } +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:right_eye_contour_landmarks" + output_stream: "RENDER_DATA:right_eye_contour_landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 4 + landmark_connections: 4 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 7 + landmark_connections: 7 + landmark_connections: 8 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 12 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 14 + landmark_connections: 0 + landmark_connections: 9 + landmark_connections: 8 + landmark_connections: 14 + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 255 g: 0 b: 0 } + visualize_landmark_depth: false + thickness: 1.0 + } + } +} + +# Converts normalized rects to drawing primitives for annotation overlay. 
+node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:rect" + output_stream: "RENDER_DATA:rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:right_eye_rect_from_landmarks" + output_stream: "RENDER_DATA:right_eye_rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:left_eye_rect_from_landmarks" + output_stream: "RENDER_DATA:left_eye_rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "IrisToRenderDataCalculator" + input_stream: "IRIS:iris_landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "RENDER_DATA:iris_render_data" + node_options: { + [type.googleapis.com/mediapipe.IrisToRenderDataCalculatorOptions] { + oval_color { r: 0 g: 0 b: 255 } + landmark_color { r: 0 g: 255 b: 0 } + oval_thickness: 4.0 + landmark_thickness: 2.0 + font_height_px: 50 + horizontal_offset_px: 200 + vertical_offset_px: 200 + location: TOP_LEFT + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_image" + input_stream: "detection_render_data" + input_stream: "face_landmarks_render_data" + input_stream: "right_eye_contour_landmarks_render_data" + input_stream: "left_eye_contour_landmarks_render_data" + input_stream: "iris_render_data" + output_stream: "IMAGE:output_image" +} diff --git a/mediapipe/graphs/media_sequence/BUILD b/mediapipe/graphs/media_sequence/BUILD new file mode 100644 index 0000000..e989147 --- /dev/null +++ b/mediapipe/graphs/media_sequence/BUILD @@ -0,0 +1,47 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
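+# (The cc_library targets below have no srcs; they exist to aggregate, and
+# statically register at link time, every calculator that the corresponding
+# *.pbtxt graphs in this directory reference by name.)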
+ +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "clipped_images_from_file_at_24fps_calculators", + deps = [ + "//mediapipe/calculators/core:packet_resampler_calculator", + "//mediapipe/calculators/image:opencv_image_encoder_calculator", + "//mediapipe/calculators/image:scale_image_calculator", + "//mediapipe/calculators/tensorflow:pack_media_sequence_calculator", + "//mediapipe/calculators/tensorflow:string_to_sequence_example_calculator", + "//mediapipe/calculators/tensorflow:unpack_media_sequence_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + ], +) + +cc_library( + name = "tvl1_flow_and_rgb_from_file_calculators", + deps = [ + "//mediapipe/calculators/core:packet_inner_join_calculator", + "//mediapipe/calculators/core:packet_resampler_calculator", + "//mediapipe/calculators/core:sequence_shift_calculator", + "//mediapipe/calculators/image:opencv_image_encoder_calculator", + "//mediapipe/calculators/image:scale_image_calculator", + "//mediapipe/calculators/tensorflow:pack_media_sequence_calculator", + "//mediapipe/calculators/tensorflow:string_to_sequence_example_calculator", + "//mediapipe/calculators/tensorflow:unpack_media_sequence_calculator", + "//mediapipe/calculators/video:flow_to_image_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + "//mediapipe/calculators/video:tvl1_optical_flow_calculator", + ], +) diff --git a/mediapipe/graphs/media_sequence/clipped_images_from_file_at_24fps.pbtxt b/mediapipe/graphs/media_sequence/clipped_images_from_file_at_24fps.pbtxt new file mode 100644 index 0000000..e3c6a51 --- /dev/null +++ b/mediapipe/graphs/media_sequence/clipped_images_from_file_at_24fps.pbtxt @@ -0,0 +1,78 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Convert the string input into a decoded SequenceExample. +node { + calculator: "StringToSequenceExampleCalculator" + input_side_packet: "STRING:input_sequence_example" + output_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example" +} + +# Unpack the data path and clip timing from the SequenceExample. +node { + calculator: "UnpackMediaSequenceCalculator" + input_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example" + output_side_packet: "DATA_PATH:input_video_path" + output_side_packet: "RESAMPLER_OPTIONS:packet_resampler_options" + node_options: { + [type.googleapis.com/mediapipe.UnpackMediaSequenceCalculatorOptions]: { + base_packet_resampler_options: { + frame_rate: 24.0 + base_timestamp: 0 + } + } + } +} + +# Decode the entire video. +node { + calculator: "OpenCvVideoDecoderCalculator" + input_side_packet: "INPUT_FILE_PATH:input_video_path" + output_stream: "VIDEO:decoded_frames" +} + +# Extract the subset of frames we want to keep. 
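+# (The packet_resampler_options side packet is assembled by the
+# UnpackMediaSequenceCalculator above, which merges the 24 fps base options
+# with the clip start/end timing stored in the SequenceExample, so resampling
+# covers exactly the requested clip.)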
+node {
+  calculator: "PacketResamplerCalculator"
+  input_stream: "decoded_frames"
+  output_stream: "sampled_frames"
+  input_side_packet: "OPTIONS:packet_resampler_options"
+}
+
+# Encode the images to store in the SequenceExample.
+node {
+  calculator: "OpenCvImageEncoderCalculator"
+  input_stream: "sampled_frames"
+  output_stream: "encoded_frames"
+  node_options: {
+    [type.googleapis.com/mediapipe.OpenCvImageEncoderCalculatorOptions]: {
+      quality: 80
+    }
+  }
+}
+
+# Store the images in the SequenceExample.
+node {
+  calculator: "PackMediaSequenceCalculator"
+  input_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example"
+  output_side_packet: "SEQUENCE_EXAMPLE:sequence_example_to_serialize"
+  input_stream: "IMAGE:encoded_frames"
+}
+
+# Serialize the SequenceExample to a string for storage.
+node {
+  calculator: "StringToSequenceExampleCalculator"
+  input_side_packet: "SEQUENCE_EXAMPLE:sequence_example_to_serialize"
+  output_side_packet: "STRING:output_sequence_example"
+}
diff --git a/mediapipe/graphs/media_sequence/tvl1_flow_and_rgb_from_file.pbtxt b/mediapipe/graphs/media_sequence/tvl1_flow_and_rgb_from_file.pbtxt
new file mode 100644
index 0000000..032fc36
--- /dev/null
+++ b/mediapipe/graphs/media_sequence/tvl1_flow_and_rgb_from_file.pbtxt
@@ -0,0 +1,153 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Convert the string input into a decoded SequenceExample.
+node {
+  calculator: "StringToSequenceExampleCalculator"
+  input_side_packet: "STRING:input_sequence_example"
+  output_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example"
+}
+
+# Unpack the data path and clip timing from the SequenceExample.
+node {
+  calculator: "UnpackMediaSequenceCalculator"
+  input_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example"
+  output_side_packet: "DATA_PATH:input_video_path"
+  output_side_packet: "RESAMPLER_OPTIONS:packet_resampler_options"
+  node_options: {
+    [type.googleapis.com/mediapipe.UnpackMediaSequenceCalculatorOptions]: {
+      base_packet_resampler_options: {
+        frame_rate: 25.0
+        base_timestamp: 0
+      }
+    }
+  }
+}
+
+# Decode the entire video.
+node {
+  calculator: "OpenCvVideoDecoderCalculator"
+  input_side_packet: "INPUT_FILE_PATH:input_video_path"
+  output_stream: "VIDEO:decoded_frames"
+}
+
+# Extract the subset of frames we want to keep.
+node {
+  calculator: "PacketResamplerCalculator"
+  input_stream: "decoded_frames"
+  output_stream: "sampled_frames"
+  input_side_packet: "OPTIONS:packet_resampler_options"
+}
+
+# Fit the images into the target size.
+node: {
+  calculator: "ScaleImageCalculator"
+  input_stream: "sampled_frames"
+  output_stream: "scaled_frames"
+  node_options: {
+    [type.googleapis.com/mediapipe.ScaleImageCalculatorOptions]: {
+      target_height: 256
+      preserve_aspect_ratio: true
+    }
+  }
+}
+
+# Shift the timestamps of packets along a stream.
+# With a packet_offset of -1, the first packet will be dropped, the second will
+# be output with the timestamp of the first, the third with the timestamp of
+# the second, and so on.
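+# (Together with the PacketInnerJoinCalculator below, this shift pairs every
+# frame with its successor at a shared timestamp; those pairs are exactly the
+# FIRST_FRAME/SECOND_FRAME inputs the TV-L1 optical-flow node needs.)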
+node: {
+  calculator: "SequenceShiftCalculator"
+  input_stream: "scaled_frames"
+  output_stream: "shifted_scaled_frames"
+  node_options: {
+    [type.googleapis.com/mediapipe.SequenceShiftCalculatorOptions]: {
+      packet_offset: -1
+    }
+  }
+}
+
+# Join the original input stream and the one that is shifted by one packet.
+node: {
+  calculator: "PacketInnerJoinCalculator"
+  input_stream: "scaled_frames"
+  input_stream: "shifted_scaled_frames"
+  output_stream: "first_frames"
+  output_stream: "second_frames"
+}
+
+# Compute the forward optical flow.
+node {
+  calculator: "Tvl1OpticalFlowCalculator"
+  input_stream: "FIRST_FRAME:first_frames"
+  input_stream: "SECOND_FRAME:second_frames"
+  output_stream: "FORWARD_FLOW:forward_flow"
+  max_in_flight: 32
+}
+
+# Convert the optical flow into an image frame with 2 channels (v_x and v_y),
+# where each channel is quantized to 0-255.
+node: {
+  calculator: "FlowToImageCalculator"
+  input_stream: "forward_flow"
+  output_stream: "flow_frames"
+  node_options: {
+    [type.googleapis.com/mediapipe.FlowToImageCalculatorOptions]: {
+      min_value: -20.0
+      max_value: 20.0
+    }
+  }
+}
+
+# Encode the optical flow images to store in the SequenceExample.
+node {
+  calculator: "OpenCvImageEncoderCalculator"
+  input_stream: "flow_frames"
+  output_stream: "encoded_flow_frames"
+  node_options: {
+    [type.googleapis.com/mediapipe.OpenCvImageEncoderCalculatorOptions]: {
+      quality: 100
+    }
+  }
+}
+
+# Encode the RGB images to store in the SequenceExample.
+node {
+  calculator: "OpenCvImageEncoderCalculator"
+  input_stream: "scaled_frames"
+  output_stream: "encoded_frames"
+  node_options: {
+    [type.googleapis.com/mediapipe.OpenCvImageEncoderCalculatorOptions]: {
+      quality: 100
+    }
+  }
+}
+
+# Store the images in the SequenceExample.
+node {
+  calculator: "PackMediaSequenceCalculator"
+  input_stream: "IMAGE:encoded_frames"
+  input_stream: "FORWARD_FLOW_ENCODED:encoded_flow_frames"
+  input_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example"
+  output_side_packet: "SEQUENCE_EXAMPLE:sequence_example_to_serialize"
+}
+
+# Serialize the SequenceExample to a string for storage.
+node {
+  calculator: "StringToSequenceExampleCalculator"
+  input_side_packet: "SEQUENCE_EXAMPLE:sequence_example_to_serialize"
+  output_side_packet: "STRING:output_sequence_example"
+}
+
+num_threads: 32
diff --git a/mediapipe/graphs/object_detection/BUILD b/mediapipe/graphs/object_detection/BUILD
new file mode 100644
index 0000000..ef53fd2
--- /dev/null
+++ b/mediapipe/graphs/object_detection/BUILD
@@ -0,0 +1,94 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "mobile_calculators", + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/tflite:tflite_converter_calculator", + "//mediapipe/calculators/tflite:tflite_inference_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_detections_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator", + "//mediapipe/calculators/util:detection_letterbox_removal_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + "//mediapipe/gpu:gpu_buffer_to_image_frame_calculator", + "//mediapipe/gpu:image_frame_to_gpu_buffer_calculator", + ], +) + +cc_library( + name = "desktop_tensorflow_calculators", + deps = [ + "//mediapipe/calculators/tensorflow:image_frame_to_tensor_calculator", + "//mediapipe/calculators/tensorflow:lapped_tensor_buffer_calculator", + "//mediapipe/calculators/tensorflow:object_detection_tensors_to_detections_calculator", + "//mediapipe/calculators/tensorflow:tensor_squeeze_dimensions_calculator", + "//mediapipe/calculators/tensorflow:tensorflow_inference_calculator", + "//mediapipe/calculators/tensorflow:tensorflow_session_from_saved_model_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + "//mediapipe/calculators/video:opencv_video_encoder_calculator", + ], +) + +cc_library( + name = "desktop_tflite_calculators", + deps = [ + "//mediapipe/calculators/core:concatenate_vector_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/tflite:tflite_converter_calculator", + "//mediapipe/calculators/tflite:tflite_inference_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_detections_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + "//mediapipe/calculators/video:opencv_video_encoder_calculator", + ], +) + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +mediapipe_binary_graph( + name = "mobile_cpu_binary_graph", + graph = "object_detection_mobile_cpu.pbtxt", + output_name = "mobile_cpu.binarypb", + deps = [":mobile_calculators"], +) + +mediapipe_binary_graph( + name = "mobile_gpu_binary_graph", + graph = "object_detection_mobile_gpu.pbtxt", + output_name = "mobile_gpu.binarypb", + deps = [":mobile_calculators"], +) diff --git a/mediapipe/graphs/object_detection/object_detection_desktop_live.pbtxt 
b/mediapipe/graphs/object_detection/object_detection_desktop_live.pbtxt
new file mode 100644
index 0000000..98b9fab
--- /dev/null
+++ b/mediapipe/graphs/object_detection/object_detection_desktop_live.pbtxt
@@ -0,0 +1,174 @@
+# MediaPipe graph that performs object detection with TensorFlow Lite on CPU.
+# Used in the examples in
+# mediapipe/examples/desktop/object_detection:object_detection_cpu.
+
+# Images on CPU coming into and out of the graph.
+input_stream: "input_video"
+output_stream: "output_video"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for
+# TfLiteTensorsToDetectionsCalculator downstream in the graph to finish
+# generating the corresponding detections before it passes through another
+# image. All images that come in while waiting are dropped, limiting the number
+# of in-flight images between this calculator and
+# TfLiteTensorsToDetectionsCalculator to 1. This prevents the nodes in between
+# from queuing up incoming images and data excessively, which leads to increased
+# latency and memory usage, unwanted in real-time mobile applications. It also
+# eliminates unnecessary computation, e.g., a transformed image produced by
+# ImageTransformationCalculator may get dropped downstream if the subsequent
+# TfLiteConverterCalculator or TfLiteInferenceCalculator is still busy
+# processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:detections"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Transforms the input image on CPU to a 320x320 image. To scale the image, by
+# default it uses the STRETCH scale mode that maps the entire input image to the
+# entire transformed image. As a result, image aspect ratio may be changed and
+# objects in the image may be deformed (stretched or squeezed), but the object
+# detection model used in this graph is agnostic to that deformation.
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "IMAGE:transformed_input_video"
+  node_options: {
+    [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] {
+      output_width: 320
+      output_height: 320
+    }
+  }
+}
+
+# Converts the transformed input image on CPU into an image tensor stored as a
+# TfLiteTensor.
+node {
+  calculator: "TfLiteConverterCalculator"
+  input_stream: "IMAGE:transformed_input_video"
+  output_stream: "TENSORS:image_tensor"
+}
+
+# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "TfLiteInferenceCalculator"
+  input_stream: "TENSORS:image_tensor"
+  output_stream: "TENSORS:detection_tensors"
+  node_options: {
+    [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] {
+      model_path: "mediapipe/models/ssdlite_object_detection.tflite"
+    }
+  }
+}
+
+# Generates a single side packet containing a vector of SSD anchors based on
+# the specification in the options.
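+# (For reference, assuming map sizes of ceil(input/stride): a 320x320 input
+# with strides 16/32/64/128/256/512 should yield feature maps of 20x20, 10x10,
+# 5x5, 3x3, 2x2 and 1x1 cells; with reduce_boxes_in_lowest_layer the first
+# layer contributes 3 anchors per cell and the others 6, i.e.
+# 1200 + (100 + 25 + 9 + 4 + 1) * 6 = 2034 anchors, matching num_boxes in
+# TfLiteTensorsToDetectionsCalculator below.)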
+node {
+  calculator: "SsdAnchorsCalculator"
+  output_side_packet: "anchors"
+  node_options: {
+    [type.googleapis.com/mediapipe.SsdAnchorsCalculatorOptions] {
+      num_layers: 6
+      min_scale: 0.2
+      max_scale: 0.95
+      input_size_height: 320
+      input_size_width: 320
+      anchor_offset_x: 0.5
+      anchor_offset_y: 0.5
+      strides: 16
+      strides: 32
+      strides: 64
+      strides: 128
+      strides: 256
+      strides: 512
+      aspect_ratios: 1.0
+      aspect_ratios: 2.0
+      aspect_ratios: 0.5
+      aspect_ratios: 3.0
+      aspect_ratios: 0.3333
+      reduce_boxes_in_lowest_layer: true
+    }
+  }
+}
+
+# Decodes the detection tensors generated by the TensorFlow Lite model, based on
+# the SSD anchors and the specification in the options, into a vector of
+# detections. Each detection describes a detected object.
+node {
+  calculator: "TfLiteTensorsToDetectionsCalculator"
+  input_stream: "TENSORS:detection_tensors"
+  input_side_packet: "ANCHORS:anchors"
+  output_stream: "DETECTIONS:detections"
+  node_options: {
+    [type.googleapis.com/mediapipe.TfLiteTensorsToDetectionsCalculatorOptions] {
+      num_classes: 91
+      num_boxes: 2034
+      num_coords: 4
+      ignore_classes: 0
+      sigmoid_score: true
+      apply_exponential_on_box_size: true
+      x_scale: 10.0
+      y_scale: 10.0
+      h_scale: 5.0
+      w_scale: 5.0
+      min_score_thresh: 0.6
+    }
+  }
+}
+
+# Performs non-max suppression to remove excessive detections.
+node {
+  calculator: "NonMaxSuppressionCalculator"
+  input_stream: "detections"
+  output_stream: "filtered_detections"
+  node_options: {
+    [type.googleapis.com/mediapipe.NonMaxSuppressionCalculatorOptions] {
+      min_suppression_threshold: 0.4
+      max_num_detections: 3
+      overlap_type: INTERSECTION_OVER_UNION
+      return_empty_detections: true
+    }
+  }
+}
+
+# Maps detection label IDs to the corresponding label text. The label map is
+# provided in the label_map_path option.
+node {
+  calculator: "DetectionLabelIdToTextCalculator"
+  input_stream: "filtered_detections"
+  output_stream: "output_detections"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] {
+      label_map_path: "mediapipe/models/ssdlite_object_detection_labelmap.txt"
+    }
+  }
+}
+
+# Converts the detections to drawing primitives for annotation overlay.
+node {
+  calculator: "DetectionsToRenderDataCalculator"
+  input_stream: "DETECTIONS:output_detections"
+  output_stream: "RENDER_DATA:render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
+      thickness: 4.0
+      color { r: 255 g: 0 b: 0 }
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE:throttled_input_video"
+  input_stream: "render_data"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/mediapipe/graphs/object_detection/object_detection_desktop_tensorflow_graph.pbtxt b/mediapipe/graphs/object_detection/object_detection_desktop_tensorflow_graph.pbtxt
new file mode 100644
index 0000000..f12eeb6
--- /dev/null
+++ b/mediapipe/graphs/object_detection/object_detection_desktop_tensorflow_graph.pbtxt
@@ -0,0 +1,130 @@
+# MediaPipe graph that performs object detection on desktop with TensorFlow
+# on CPU.
+# Used in the example in
+# mediapipe/examples/desktop/object_detection:object_detection_tensorflow.
+
+# Decodes an input video file into images and a video header.
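+# (The VIDEO_PRESTREAM output carries a VideoHeader packet with the frame
+# dimensions and frame rate ahead of the image packets; the encoder at the end
+# of this graph reuses it to configure the output file.)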
+node { + calculator: "OpenCvVideoDecoderCalculator" + input_side_packet: "INPUT_FILE_PATH:input_video_path" + output_stream: "VIDEO:input_video" + output_stream: "VIDEO_PRESTREAM:input_video_header" +} + +# Converts the input image into an image tensor as a tensorflow::Tensor. +node { + calculator: "ImageFrameToTensorCalculator" + input_stream: "input_video" + output_stream: "image_tensor" +} + +# Generates a single side packet containing a TensorFlow session from a saved +# model. The directory path that contains the saved model is specified in the +# saved_model_path option, and the name of the saved model file has to be +# "saved_model.pb". +node { + calculator: "TensorFlowSessionFromSavedModelCalculator" + output_side_packet: "SESSION:object_detection_session" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowSessionFromSavedModelCalculatorOptions]: { + saved_model_path: "mediapipe/models/object_detection_saved_model" + } + } +} + +# Runs a TensorFlow session (specified as an input side packet) that takes an +# image tensor and outputs multiple tensors that describe the objects detected +# in the image. The batch_size option is set to 1 to disable batching entirely. +# Note that the particular TensorFlow model used in this session handles image +# scaling internally before the object-detection inference, and therefore no +# additional calculator for image transformation is needed in this MediaPipe +# graph. +node: { + calculator: "TensorFlowInferenceCalculator" + input_side_packet: "SESSION:object_detection_session" + input_stream: "INPUTS:image_tensor" + output_stream: "DETECTION_BOXES:detection_boxes_tensor" + output_stream: "DETECTION_CLASSES:detection_classes_tensor" + output_stream: "DETECTION_SCORES:detection_scores_tensor" + output_stream: "NUM_DETECTIONS:num_detections_tensor" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowInferenceCalculatorOptions]: { + batch_size: 1 + } + } +} + +# Decodes the detection tensors from the TensorFlow model into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "ObjectDetectionTensorsToDetectionsCalculator" + input_stream: "BOXES:detection_boxes_tensor" + input_stream: "SCORES:detection_scores_tensor" + input_stream: "CLASSES:detection_classes_tensor" + input_stream: "NUM_DETECTIONS:num_detections_tensor" + output_stream: "DETECTIONS:detections" +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "detections" + output_stream: "filtered_detections" + node_options: { + [type.googleapis.com/mediapipe.NonMaxSuppressionCalculatorOptions] { + min_suppression_threshold: 0.4 + min_score_threshold: 0.6 + max_num_detections: 10 + overlap_type: INTERSECTION_OVER_UNION + } + } +} + +# Maps detection label IDs to the corresponding label text. The label map is +# provided in the label_map_path option. +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "filtered_detections" + output_stream: "output_detections" + node_options: { + [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] { + label_map_path: "mediapipe/models/ssdlite_object_detection_labelmap.txt" + } + } +} + +# Converts the detections to drawing primitives for annotation overlay. 
+node {
+  calculator: "DetectionsToRenderDataCalculator"
+  input_stream: "DETECTIONS:output_detections"
+  output_stream: "RENDER_DATA:render_data"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
+      thickness: 4.0
+      color { r: 255 g: 0 b: 0 }
+    }
+  }
+}
+
+# Draws annotations and overlays them on top of the input images.
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE:input_video"
+  input_stream: "render_data"
+  output_stream: "IMAGE:output_video"
+}
+
+# Encodes the annotated images into a video file, adopting properties specified
+# in the input video header, e.g., video framerate.
+node {
+  calculator: "OpenCvVideoEncoderCalculator"
+  input_stream: "VIDEO:output_video"
+  input_stream: "VIDEO_PRESTREAM:input_video_header"
+  input_side_packet: "OUTPUT_FILE_PATH:output_video_path"
+  node_options: {
+    [type.googleapis.com/mediapipe.OpenCvVideoEncoderCalculatorOptions]: {
+      codec: "avc1"
+      video_format: "mp4"
+    }
+  }
+}
diff --git a/mediapipe/graphs/object_detection/object_detection_desktop_tflite_graph.pbtxt b/mediapipe/graphs/object_detection/object_detection_desktop_tflite_graph.pbtxt
new file mode 100644
index 0000000..15aa2cd
--- /dev/null
+++ b/mediapipe/graphs/object_detection/object_detection_desktop_tflite_graph.pbtxt
@@ -0,0 +1,180 @@
+# MediaPipe graph that performs object detection on desktop with TensorFlow Lite
+# on CPU.
+# Used in the example in
+# mediapipe/examples/desktop/object_detection:object_detection_tflite.
+
+# max_queue_size limits the number of packets enqueued on any input stream
+# by throttling inputs to the graph. This makes the graph process only one
+# frame at a time.
+max_queue_size: 1
+
+# Decodes an input video file into images and a video header.
+node {
+  calculator: "OpenCvVideoDecoderCalculator"
+  input_side_packet: "INPUT_FILE_PATH:input_video_path"
+  output_stream: "VIDEO:input_video"
+  output_stream: "VIDEO_PRESTREAM:input_video_header"
+}
+
+# Transforms the input image on CPU to a 320x320 image. To scale the image, by
+# default it uses the STRETCH scale mode that maps the entire input image to the
+# entire transformed image. As a result, image aspect ratio may be changed and
+# objects in the image may be deformed (stretched or squeezed), but the object
+# detection model used in this graph is agnostic to that deformation.
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE:input_video"
+  output_stream: "IMAGE:transformed_input_video"
+  node_options: {
+    [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] {
+      output_width: 320
+      output_height: 320
+    }
+  }
+}
+
+# Converts the transformed input image on CPU into an image tensor as a
+# TfLiteTensor. The zero_center option is set to true to normalize the
+# pixel values to [-1.f, 1.f] as opposed to [0.f, 1.f].
+node {
+  calculator: "TfLiteConverterCalculator"
+  input_stream: "IMAGE:transformed_input_video"
+  output_stream: "TENSORS:image_tensor"
+  node_options: {
+    [type.googleapis.com/mediapipe.TfLiteConverterCalculatorOptions] {
+      zero_center: true
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
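+# (Per the decoder options further below (num_boxes: 2034, num_coords: 4,
+# num_classes: 91), the model is expected to emit one tensor of 2034x4 box
+# encodings and one tensor of 2034x91 class scores.)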
+node { + calculator: "TfLiteInferenceCalculator" + input_stream: "TENSORS:image_tensor" + output_stream: "TENSORS:detection_tensors" + node_options: { + [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] { + model_path: "mediapipe/models/ssdlite_object_detection.tflite" + } + } +} + +# Generates a single side packet containing a vector of SSD anchors based on +# the specification in the options. +node { + calculator: "SsdAnchorsCalculator" + output_side_packet: "anchors" + node_options: { + [type.googleapis.com/mediapipe.SsdAnchorsCalculatorOptions] { + num_layers: 6 + min_scale: 0.2 + max_scale: 0.95 + input_size_height: 320 + input_size_width: 320 + anchor_offset_x: 0.5 + anchor_offset_y: 0.5 + strides: 16 + strides: 32 + strides: 64 + strides: 128 + strides: 256 + strides: 512 + aspect_ratios: 1.0 + aspect_ratios: 2.0 + aspect_ratios: 0.5 + aspect_ratios: 3.0 + aspect_ratios: 0.3333 + reduce_boxes_in_lowest_layer: true + } + } +} + +# Decodes the detection tensors generated by the TensorFlow Lite model, based on +# the SSD anchors and the specification in the options, into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "TfLiteTensorsToDetectionsCalculator" + input_stream: "TENSORS:detection_tensors" + input_side_packet: "ANCHORS:anchors" + output_stream: "DETECTIONS:detections" + node_options: { + [type.googleapis.com/mediapipe.TfLiteTensorsToDetectionsCalculatorOptions] { + num_classes: 91 + num_boxes: 2034 + num_coords: 4 + ignore_classes: 0 + apply_exponential_on_box_size: true + + x_scale: 10.0 + y_scale: 10.0 + h_scale: 5.0 + w_scale: 5.0 + } + } +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "detections" + output_stream: "filtered_detections" + node_options: { + [type.googleapis.com/mediapipe.NonMaxSuppressionCalculatorOptions] { + min_suppression_threshold: 0.4 + min_score_threshold: 0.6 + max_num_detections: 5 + overlap_type: INTERSECTION_OVER_UNION + } + } +} + +# Maps detection label IDs to the corresponding label text. The label map is +# provided in the label_map_path option. +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "filtered_detections" + output_stream: "output_detections" + node_options: { + [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] { + label_map_path: "mediapipe/models/ssdlite_object_detection_labelmap.txt" + } + } +} + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:output_detections" + output_stream: "RENDER_DATA:render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 255 g: 0 b: 0 } + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_video" + input_stream: "render_data" + output_stream: "IMAGE:output_video" +} + +# Encodes the annotated images into a video file, adopting properties specified +# in the input video header, e.g., video framerate. 
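+# (Note: the "avc1" codec assumes an OpenCV build with H.264 support, e.g.
+# one linked against FFmpeg; without it, opening the encoder typically fails
+# at runtime.)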
+node {
+  calculator: "OpenCvVideoEncoderCalculator"
+  input_stream: "VIDEO:output_video"
+  input_stream: "VIDEO_PRESTREAM:input_video_header"
+  input_side_packet: "OUTPUT_FILE_PATH:output_video_path"
+  node_options: {
+    [type.googleapis.com/mediapipe.OpenCvVideoEncoderCalculatorOptions]: {
+      codec: "avc1"
+      video_format: "mp4"
+    }
+  }
+}
diff --git a/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt b/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt
new file mode 100644
index 0000000..8256179
--- /dev/null
+++ b/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt
@@ -0,0 +1,193 @@
+# MediaPipe graph that performs object detection with TensorFlow Lite on CPU.
+# Used in the examples in
+# mediapipe/examples/android/src/java/com/mediapipe/apps/objectdetectioncpu and
+# mediapipe/examples/ios/objectdetectioncpu.
+
+# Images on GPU coming into and out of the graph.
+input_stream: "input_video"
+output_stream: "output_video"
+
+# Transfers the input image from GPU to CPU memory for the purpose of
+# demonstrating a CPU-based pipeline. Note that the input image on GPU has the
+# origin defined at the bottom-left corner (OpenGL convention). As a result,
+# the transferred image on CPU also shares the same representation.
+node: {
+  calculator: "GpuBufferToImageFrameCalculator"
+  input_stream: "input_video"
+  output_stream: "input_video_cpu"
+}
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for
+# TfLiteTensorsToDetectionsCalculator downstream in the graph to finish
+# generating the corresponding detections before it passes through another
+# image. All images that come in while waiting are dropped, limiting the number
+# of in-flight images between this calculator and
+# TfLiteTensorsToDetectionsCalculator to 1. This prevents the nodes in between
+# from queuing up incoming images and data excessively, which leads to increased
+# latency and memory usage, unwanted in real-time mobile applications. It also
+# eliminates unnecessary computation, e.g., a transformed image produced by
+# ImageTransformationCalculator may get dropped downstream if the subsequent
+# TfLiteConverterCalculator or TfLiteInferenceCalculator is still busy
+# processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video_cpu"
+  input_stream: "FINISHED:detections"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video_cpu"
+}
+
+# Transforms the input image on CPU to a 320x320 image. To scale the image, by
+# default it uses the STRETCH scale mode that maps the entire input image to the
+# entire transformed image. As a result, image aspect ratio may be changed and
+# objects in the image may be deformed (stretched or squeezed), but the object
+# detection model used in this graph is agnostic to that deformation.
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE:throttled_input_video_cpu"
+  output_stream: "IMAGE:transformed_input_video_cpu"
+  node_options: {
+    [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] {
+      output_width: 320
+      output_height: 320
+    }
+  }
+}
+
+# Converts the transformed input image on CPU into an image tensor stored as a
+# TfLiteTensor.
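+# (Unlike the desktop TFLite graph above, no converter options are overridden
+# here, so this node runs with the calculator's defaults.)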
+node { + calculator: "TfLiteConverterCalculator" + input_stream: "IMAGE:transformed_input_video_cpu" + output_stream: "TENSORS:image_tensor" +} + +# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "TfLiteInferenceCalculator" + input_stream: "TENSORS:image_tensor" + output_stream: "TENSORS:detection_tensors" + node_options: { + [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] { + model_path: "mediapipe/models/ssdlite_object_detection.tflite" + } + } +} + +# Generates a single side packet containing a vector of SSD anchors based on +# the specification in the options. +node { + calculator: "SsdAnchorsCalculator" + output_side_packet: "anchors" + node_options: { + [type.googleapis.com/mediapipe.SsdAnchorsCalculatorOptions] { + num_layers: 6 + min_scale: 0.2 + max_scale: 0.95 + input_size_height: 320 + input_size_width: 320 + anchor_offset_x: 0.5 + anchor_offset_y: 0.5 + strides: 16 + strides: 32 + strides: 64 + strides: 128 + strides: 256 + strides: 512 + aspect_ratios: 1.0 + aspect_ratios: 2.0 + aspect_ratios: 0.5 + aspect_ratios: 3.0 + aspect_ratios: 0.3333 + reduce_boxes_in_lowest_layer: true + } + } +} + +# Decodes the detection tensors generated by the TensorFlow Lite model, based on +# the SSD anchors and the specification in the options, into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "TfLiteTensorsToDetectionsCalculator" + input_stream: "TENSORS:detection_tensors" + input_side_packet: "ANCHORS:anchors" + output_stream: "DETECTIONS:detections" + node_options: { + [type.googleapis.com/mediapipe.TfLiteTensorsToDetectionsCalculatorOptions] { + num_classes: 91 + num_boxes: 2034 + num_coords: 4 + ignore_classes: 0 + sigmoid_score: true + apply_exponential_on_box_size: true + x_scale: 10.0 + y_scale: 10.0 + h_scale: 5.0 + w_scale: 5.0 + min_score_thresh: 0.6 + } + } +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "detections" + output_stream: "filtered_detections" + node_options: { + [type.googleapis.com/mediapipe.NonMaxSuppressionCalculatorOptions] { + min_suppression_threshold: 0.4 + max_num_detections: 3 + overlap_type: INTERSECTION_OVER_UNION + return_empty_detections: true + } + } +} + +# Maps detection label IDs to the corresponding label text. The label map is +# provided in the label_map_path option. +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "filtered_detections" + output_stream: "output_detections" + node_options: { + [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] { + label_map_path: "mediapipe/models/ssdlite_object_detection_labelmap.txt" + } + } +} + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:output_detections" + output_stream: "RENDER_DATA:render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 255 g: 0 b: 0 } + } + } +} + +# Draws annotations and overlays them on top of the input images. 
+node {
+  calculator: "AnnotationOverlayCalculator"
+  input_stream: "IMAGE:throttled_input_video_cpu"
+  input_stream: "render_data"
+  output_stream: "IMAGE:output_video_cpu"
+}
+
+# Transfers the annotated image from CPU back to GPU memory, to be sent out of
+# the graph.
+node: {
+  calculator: "ImageFrameToGpuBufferCalculator"
+  input_stream: "output_video_cpu"
+  output_stream: "output_video"
+}
diff --git a/mediapipe/graphs/object_detection/object_detection_mobile_gpu.pbtxt b/mediapipe/graphs/object_detection/object_detection_mobile_gpu.pbtxt
new file mode 100644
index 0000000..1ed66e8
--- /dev/null
+++ b/mediapipe/graphs/object_detection/object_detection_mobile_gpu.pbtxt
@@ -0,0 +1,175 @@
+# MediaPipe graph that performs object detection with TensorFlow Lite on GPU.
+# Used in the examples in
+# mediapipe/examples/android/src/java/com/mediapipe/apps/objectdetectiongpu and
+# mediapipe/examples/ios/objectdetectiongpu.
+
+# Images on GPU coming into and out of the graph.
+input_stream: "input_video"
+output_stream: "output_video"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for
+# TfLiteTensorsToDetectionsCalculator downstream in the graph to finish
+# generating the corresponding detections before it passes through another
+# image. All images that come in while waiting are dropped, limiting the number
+# of in-flight images between this calculator and
+# TfLiteTensorsToDetectionsCalculator to 1. This prevents the nodes in between
+# from queuing up incoming images and data excessively, which leads to increased
+# latency and memory usage, unwanted in real-time mobile applications. It also
+# eliminates unnecessary computation, e.g., a transformed image produced by
+# ImageTransformationCalculator may get dropped downstream if the subsequent
+# TfLiteConverterCalculator or TfLiteInferenceCalculator is still busy
+# processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:detections"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Transforms the input image on GPU to a 320x320 image. To scale the image, by
+# default it uses the STRETCH scale mode that maps the entire input image to the
+# entire transformed image. As a result, image aspect ratio may be changed and
+# objects in the image may be deformed (stretched or squeezed), but the object
+# detection model used in this graph is agnostic to that deformation.
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE_GPU:throttled_input_video"
+  output_stream: "IMAGE_GPU:transformed_input_video"
+  node_options: {
+    [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] {
+      output_width: 320
+      output_height: 320
+    }
+  }
+}
+
+# Converts the transformed input image on GPU into an image tensor stored as a
+# TfLiteTensor.
+node {
+  calculator: "TfLiteConverterCalculator"
+  input_stream: "IMAGE_GPU:transformed_input_video"
+  output_stream: "TENSORS_GPU:image_tensor"
+}
+
+# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
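+# (With the TENSORS_GPU tags, the image tensor and the raw detection tensors
+# stay in GPU memory across the converter, inference and decoder nodes; only
+# the decoded Detection vector below is returned to the CPU.)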
+node { + calculator: "TfLiteInferenceCalculator" + input_stream: "TENSORS_GPU:image_tensor" + output_stream: "TENSORS_GPU:detection_tensors" + node_options: { + [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] { + model_path: "mediapipe/models/ssdlite_object_detection.tflite" + } + } +} + +# Generates a single side packet containing a vector of SSD anchors based on +# the specification in the options. +node { + calculator: "SsdAnchorsCalculator" + output_side_packet: "anchors" + node_options: { + [type.googleapis.com/mediapipe.SsdAnchorsCalculatorOptions] { + num_layers: 6 + min_scale: 0.2 + max_scale: 0.95 + input_size_height: 320 + input_size_width: 320 + anchor_offset_x: 0.5 + anchor_offset_y: 0.5 + strides: 16 + strides: 32 + strides: 64 + strides: 128 + strides: 256 + strides: 512 + aspect_ratios: 1.0 + aspect_ratios: 2.0 + aspect_ratios: 0.5 + aspect_ratios: 3.0 + aspect_ratios: 0.3333 + reduce_boxes_in_lowest_layer: true + } + } +} + +# Decodes the detection tensors generated by the TensorFlow Lite model, based on +# the SSD anchors and the specification in the options, into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "TfLiteTensorsToDetectionsCalculator" + input_stream: "TENSORS_GPU:detection_tensors" + input_side_packet: "ANCHORS:anchors" + output_stream: "DETECTIONS:detections" + node_options: { + [type.googleapis.com/mediapipe.TfLiteTensorsToDetectionsCalculatorOptions] { + num_classes: 91 + num_boxes: 2034 + num_coords: 4 + ignore_classes: 0 + sigmoid_score: true + apply_exponential_on_box_size: true + x_scale: 10.0 + y_scale: 10.0 + h_scale: 5.0 + w_scale: 5.0 + min_score_thresh: 0.6 + } + } +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "detections" + output_stream: "filtered_detections" + node_options: { + [type.googleapis.com/mediapipe.NonMaxSuppressionCalculatorOptions] { + min_suppression_threshold: 0.4 + max_num_detections: 3 + overlap_type: INTERSECTION_OVER_UNION + return_empty_detections: true + } + } +} + +# Maps detection label IDs to the corresponding label text. The label map is +# provided in the label_map_path option. +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "filtered_detections" + output_stream: "output_detections" + node_options: { + [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] { + label_map_path: "mediapipe/models/ssdlite_object_detection_labelmap.txt" + } + } +} + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:output_detections" + output_stream: "RENDER_DATA:render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 255 g: 0 b: 0 } + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE_GPU:throttled_input_video" + input_stream: "render_data" + output_stream: "IMAGE_GPU:output_video" +} diff --git a/mediapipe/graphs/object_detection_3d/BUILD b/mediapipe/graphs/object_detection_3d/BUILD new file mode 100644 index 0000000..7ba00c0 --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/BUILD @@ -0,0 +1,80 @@ +# Copyright 2020 The MediaPipe Authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +exports_files(glob([ + "*.pbtxt", +])) + +cc_library( + name = "mobile_calculators", + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_cropping_calculator", + "//mediapipe/graphs/object_detection_3d/calculators:annotations_to_model_matrices_calculator", + "//mediapipe/graphs/object_detection_3d/calculators:gl_animation_overlay_calculator", + "//mediapipe/modules/objectron:objectron_gpu", + ], +) + +cc_library( + name = "mobile_calculators_1stage", + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/calculators/core:packet_resampler_calculator", + "//mediapipe/calculators/image:image_cropping_calculator", + "//mediapipe/gpu:gl_scaler_calculator", + "//mediapipe/graphs/object_detection_3d/calculators:annotations_to_model_matrices_calculator", + "//mediapipe/graphs/object_detection_3d/calculators:gl_animation_overlay_calculator", + "//mediapipe/modules/objectron:objectron_detection_1stage_gpu", + "//mediapipe/modules/objectron:objectron_tracking_1stage_gpu", + ], +) + +cc_library( + name = "desktop_cpu_calculators", + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + "//mediapipe/calculators/video:opencv_video_encoder_calculator", + "//mediapipe/graphs/object_detection_3d/subgraphs:renderer_cpu", + "//mediapipe/modules/objectron:objectron_cpu", + ], +) + +mediapipe_binary_graph( + name = "mobile_gpu_binary_graph", + graph = "object_occlusion_tracking.pbtxt", + output_name = "mobile_gpu_binary_graph.binarypb", + visibility = ["//visibility:public"], + deps = [":mobile_calculators"], +) + +mediapipe_binary_graph( + name = "mobile_gpu_1stage_binary_graph", + graph = "object_occlusion_tracking_1stage.pbtxt", + output_name = "mobile_gpu_1stage_binary_graph.binarypb", + visibility = ["//visibility:public"], + deps = [":mobile_calculators_1stage"], +) diff --git a/mediapipe/graphs/object_detection_3d/calculators/BUILD b/mediapipe/graphs/object_detection_3d/calculators/BUILD new file mode 100644 index 0000000..8f80312 --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/calculators/BUILD @@ -0,0 +1,113 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_proto_library( + name = "gl_animation_overlay_calculator_proto", + srcs = ["gl_animation_overlay_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_proto_library( + name = "annotations_to_model_matrices_calculator_proto", + srcs = ["annotations_to_model_matrices_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_proto_library( + name = "model_matrix_proto", + srcs = ["model_matrix.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_proto_library( + name = "annotations_to_render_data_calculator_proto", + srcs = ["annotations_to_render_data_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_proto", + "//mediapipe/util:color_proto", + ], +) + +cc_library( + name = "gl_animation_overlay_calculator", + srcs = ["gl_animation_overlay_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":gl_animation_overlay_calculator_cc_proto", + ":model_matrix_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/gpu:gl_calculator_helper", + "//mediapipe/gpu:shader_util", + "//mediapipe/modules/objectron/calculators:camera_parameters_cc_proto", + "//mediapipe/util/android:asset_manager_util", + ], + alwayslink = 1, +) + +cc_library( + name = "annotations_to_model_matrices_calculator", + srcs = ["annotations_to_model_matrices_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":annotations_to_model_matrices_calculator_cc_proto", + ":model_matrix_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_options_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/modules/objectron/calculators:annotation_cc_proto", + "//mediapipe/modules/objectron/calculators:box", + "//mediapipe/util:color_cc_proto", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/strings", + "@eigen_archive//:eigen3", + ], + alwayslink = 1, +) + +cc_library( + name = "annotations_to_render_data_calculator", + srcs = ["annotations_to_render_data_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":annotations_to_render_data_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_options_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/modules/objectron/calculators:annotation_cc_proto", + "//mediapipe/util:color_cc_proto", + "//mediapipe/util:render_data_cc_proto", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) diff --git a/mediapipe/graphs/object_detection_3d/calculators/annotations_to_model_matrices_calculator.cc b/mediapipe/graphs/object_detection_3d/calculators/annotations_to_model_matrices_calculator.cc new file mode 100644 index 0000000..183f6fc --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/calculators/annotations_to_model_matrices_calculator.cc @@ -0,0 +1,215 @@ +// Copyright 2020 The 
MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <memory>
+
+#include "Eigen/Core"
+#include "Eigen/Dense"
+#include "Eigen/Geometry"
+#include "absl/memory/memory.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_join.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/calculator_options.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/graphs/object_detection_3d/calculators/annotations_to_model_matrices_calculator.pb.h"
+#include "mediapipe/graphs/object_detection_3d/calculators/model_matrix.pb.h"
+#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h"
+#include "mediapipe/modules/objectron/calculators/box.h"
+#include "mediapipe/util/color.pb.h"
+
+namespace mediapipe {
+
+namespace {
+
+constexpr char kAnnotationTag[] = "ANNOTATIONS";
+constexpr char kModelMatricesTag[] = "MODEL_MATRICES";
+
+using Matrix3fRM = Eigen::Matrix<float, 3, 3, Eigen::RowMajor>;
+using Matrix4fRM = Eigen::Matrix<float, 4, 4, Eigen::RowMajor>;
+
+}  // namespace
+
+// Converts the box prediction from the Objectron model to the model matrices
+// to be rendered.
+//
+// Input:
+//  ANNOTATIONS - Frame annotations with lifted 3D points; the points are in
+//  the Objectron coordinate system.
+// Output:
+//  MODEL_MATRICES - Resulting ModelMatrices, in the OpenGL coordinate system.
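+//
+// Each output matrix is a TimedModelMatrixProto carrying the 16 entries of a
+// 4x4 model-view matrix in row-major order. As an illustrative sketch (values
+// made up, not from a real model), a box translated 98.6 units along -z would
+// be emitted roughly as:
+//
+//   model_matrix {
+//     id: 0
+//     matrix_entries: [1.0, 0.0, 0.0,   0.0,
+//                      0.0, 1.0, 0.0,   0.0,
+//                      0.0, 0.0, 1.0, -98.6,
+//                      0.0, 0.0, 0.0,   1.0]
+//   }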
+//
+// Usage example:
+// node {
+//   calculator: "AnnotationsToModelMatricesCalculator"
+//   input_stream: "ANNOTATIONS:objects"
+//   output_stream: "MODEL_MATRICES:model_matrices"
+// }
+
+class AnnotationsToModelMatricesCalculator : public CalculatorBase {
+ public:
+  AnnotationsToModelMatricesCalculator() {}
+  ~AnnotationsToModelMatricesCalculator() override {}
+  AnnotationsToModelMatricesCalculator(
+      const AnnotationsToModelMatricesCalculator&) = delete;
+  AnnotationsToModelMatricesCalculator& operator=(
+      const AnnotationsToModelMatricesCalculator&) = delete;
+
+  static absl::Status GetContract(CalculatorContract* cc);
+
+  absl::Status Open(CalculatorContext* cc) override;
+
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  absl::Status GetModelMatricesForAnnotations(
+      const FrameAnnotation& annotations,
+      TimedModelMatrixProtoList* model_matrix_list);
+
+  AnnotationsToModelMatricesCalculatorOptions options_;
+  Eigen::Vector3f model_scale_;
+  Matrix4fRM model_transformation_;
+};
+REGISTER_CALCULATOR(AnnotationsToModelMatricesCalculator);
+
+absl::Status AnnotationsToModelMatricesCalculator::GetContract(
+    CalculatorContract* cc) {
+  RET_CHECK(cc->Inputs().HasTag(kAnnotationTag)) << "No input stream found.";
+  if (cc->Inputs().HasTag(kAnnotationTag)) {
+    cc->Inputs().Tag(kAnnotationTag).Set<FrameAnnotation>();
+  }
+
+  if (cc->Outputs().HasTag(kModelMatricesTag)) {
+    cc->Outputs().Tag(kModelMatricesTag).Set<TimedModelMatrixProtoList>();
+  }
+
+  if (cc->InputSidePackets().HasTag("MODEL_SCALE")) {
+    cc->InputSidePackets().Tag("MODEL_SCALE").Set<float[]>();
+  }
+
+  if (cc->InputSidePackets().HasTag("MODEL_TRANSFORMATION")) {
+    cc->InputSidePackets().Tag("MODEL_TRANSFORMATION").Set<float[]>();
+  }
+  return absl::OkStatus();
+}
+
+absl::Status AnnotationsToModelMatricesCalculator::Open(CalculatorContext* cc) {
+  RET_CHECK(cc->Inputs().HasTag(kAnnotationTag));
+
+  cc->SetOffset(TimestampDiff(0));
+  options_ = cc->Options<AnnotationsToModelMatricesCalculatorOptions>();
+
+  if (cc->InputSidePackets().HasTag("MODEL_SCALE")) {
+    model_scale_ = Eigen::Map<const Eigen::Vector3f>(
+        cc->InputSidePackets().Tag("MODEL_SCALE").Get<float[]>());
+  } else if (options_.model_scale_size() == 3) {
+    model_scale_ =
+        Eigen::Map<const Eigen::Vector3f>(options_.model_scale().data());
+  } else {
+    model_scale_.setOnes();
+  }
+
+  if (cc->InputSidePackets().HasTag("MODEL_TRANSFORMATION")) {
+    model_transformation_ = Eigen::Map<const Matrix4fRM>(
+        cc->InputSidePackets().Tag("MODEL_TRANSFORMATION").Get<float[]>());
+  } else if (options_.model_transformation_size() == 16) {
+    model_transformation_ =
+        Eigen::Map<const Matrix4fRM>(options_.model_transformation().data());
+  } else {
+    model_transformation_.setIdentity();
+  }
+
+  return absl::OkStatus();
+}
+
+absl::Status AnnotationsToModelMatricesCalculator::Process(
+    CalculatorContext* cc) {
+  auto model_matrices = std::make_unique<TimedModelMatrixProtoList>();
+
+  const FrameAnnotation& annotations =
+      cc->Inputs().Tag(kAnnotationTag).Get<FrameAnnotation>();
+
+  if (!GetModelMatricesForAnnotations(annotations, model_matrices.get()).ok()) {
+    return absl::InvalidArgumentError("Error in GetModelMatricesForBoxes");
+  }
+  cc->Outputs()
+      .Tag(kModelMatricesTag)
+      .Add(model_matrices.release(), cc->InputTimestamp());
+
+  return absl::OkStatus();
+}
+
+absl::Status
+AnnotationsToModelMatricesCalculator::GetModelMatricesForAnnotations(
+    const FrameAnnotation& annotations,
+    TimedModelMatrixProtoList* model_matrix_list) {
+  if (model_matrix_list == nullptr) {
+    return absl::InvalidArgumentError("model_matrix_list is nullptr");
+  }
+  model_matrix_list->clear_model_matrix();
+
+  for (const auto& object : annotations.annotations()) {
+    TimedModelMatrixProto* model_matrix =
model_matrix_list->add_model_matrix();
+    model_matrix->set_id(object.object_id());
+
+    // Get object rotation, translation and scale.
+    const auto object_rotation =
+        Eigen::Map<const Matrix3fRM>(object.rotation().data());
+    const auto object_translation =
+        Eigen::Map<const Eigen::Vector3f>(object.translation().data());
+    const auto object_scale =
+        Eigen::Map<const Eigen::Vector3f>(object.scale().data());
+
+    // Compose object transformation matrix.
+    Matrix4fRM object_transformation;
+    object_transformation.setIdentity();
+    object_transformation.topLeftCorner<3, 3>() = object_rotation;
+    object_transformation.topRightCorner<3, 1>() = object_translation;
+
+    Matrix4fRM model_view;
+    Matrix4fRM objectron_model;
+    // The reference view is
+    //
+    // ref << 0.,  0.,  1., 0.,
+    //        -1., 0.,  0., 0.,
+    //        0.,  -1., 0., 0.,
+    //        0.,  0.,  0., 1.;
+    // We have objectron_model * model = model_view, so to get objectron_model:
+    // objectron_model = model_view * model^-1
+    // clang-format off
+    objectron_model << 1.0, 0.0, 0.0, 0.0,
+                       0.0, -1., 0.0, 0.0,
+                       0.0, 0.0, 1.0, 0.0,
+                       0.0, 0.0, 0.0, 1.0;
+    // clang-format on
+
+    // Re-scale the CAD model to the scale of the estimated bounding box.
+    const Eigen::Vector3f scale = model_scale_.cwiseProduct(object_scale);
+    const Matrix4fRM model =
+        model_transformation_.array().colwise() * scale.homogeneous().array();
+
+    // Finally compute the model_view matrix.
+    model_view = objectron_model * object_transformation * model;
+
+    for (int i = 0; i < model_view.rows(); ++i) {
+      for (int j = 0; j < model_view.cols(); ++j) {
+        model_matrix->add_matrix_entries(model_view(i, j));
+      }
+    }
+  }
+  return absl::OkStatus();
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/graphs/object_detection_3d/calculators/annotations_to_model_matrices_calculator.proto b/mediapipe/graphs/object_detection_3d/calculators/annotations_to_model_matrices_calculator.proto
new file mode 100644
index 0000000..c0159d4
--- /dev/null
+++ b/mediapipe/graphs/object_detection_3d/calculators/annotations_to_model_matrices_calculator.proto
@@ -0,0 +1,33 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+
+message AnnotationsToModelMatricesCalculatorOptions {
+  extend CalculatorOptions {
+    optional AnnotationsToModelMatricesCalculatorOptions ext = 290166283;
+  }
+
+  // Vector of size 3 indicating the scale vector [x, y, z]. We will re-scale
+  // the model size with this vector. (Defaults to [1., 1., 1.])
+  repeated float model_scale = 1;
+
+  // 4x4 row-major matrix denoting the transformation from the model to the
+  // Deep Pursuit 3D coordinate system (where front is +z, and up is +y).
+  repeated float model_transformation = 2;
+}
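+
+// A sketch of setting these options in a graph config (values illustrative;
+// the rotation shown maps a y-up model onto the front-is-+z convention by
+// rotating the model's +y axis onto +z):
+//
+//   node_options: {
+//     [type.googleapis.com/mediapipe.AnnotationsToModelMatricesCalculatorOptions] {
+//       model_scale: [0.05, 0.05, 0.05]
+//       model_transformation: [1, 0,  0, 0,
+//                              0, 0, -1, 0,
+//                              0, 1,  0, 0,
+//                              0, 0,  0, 1]
+//     }
+//   }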
diff --git a/mediapipe/graphs/object_detection_3d/calculators/annotations_to_render_data_calculator.cc b/mediapipe/graphs/object_detection_3d/calculators/annotations_to_render_data_calculator.cc
new file mode 100644
index 0000000..65bff77
--- /dev/null
+++ b/mediapipe/graphs/object_detection_3d/calculators/annotations_to_render_data_calculator.cc
@@ -0,0 +1,271 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <algorithm>
+#include <limits>
+
+#include "absl/memory/memory.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_join.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/calculator_options.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/graphs/object_detection_3d/calculators/annotations_to_render_data_calculator.pb.h"
+#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h"
+#include "mediapipe/util/color.pb.h"
+#include "mediapipe/util/render_data.pb.h"
+
+namespace mediapipe {
+
+namespace {
+
+constexpr char kAnnotationTag[] = "ANNOTATIONS";
+constexpr char kRenderDataTag[] = "RENDER_DATA";
+constexpr char kKeypointLabel[] = "KEYPOINT";
+constexpr int kMaxLandmarkThickness = 18;
+
+inline void SetColor(RenderAnnotation* annotation, const Color& color) {
+  annotation->mutable_color()->set_r(color.r());
+  annotation->mutable_color()->set_g(color.g());
+  annotation->mutable_color()->set_b(color.b());
+}
+
+// Remap x from range [lo, hi] to range [0, 1], then multiply by scale.
+inline float Remap(float x, float lo, float hi, float scale) {
+  return (x - lo) / (hi - lo + 1e-6) * scale;
+}
+
+inline void GetMinMaxZ(const FrameAnnotation& annotations, float* z_min,
+                       float* z_max) {
+  *z_min = std::numeric_limits<float>::max();
+  // lowest() (not min()) so that non-positive depths are handled correctly.
+  *z_max = std::numeric_limits<float>::lowest();
+  // Use a global depth scale for all the objects in the scene.
+  for (const auto& object : annotations.annotations()) {
+    for (const auto& keypoint : object.keypoints()) {
+      *z_min = std::min(keypoint.point_2d().depth(), *z_min);
+      *z_max = std::max(keypoint.point_2d().depth(), *z_max);
+    }
+  }
+}
+
+void SetColorSizeValueFromZ(float z, float z_min, float z_max,
+                            RenderAnnotation* render_annotation) {
+  const int color_value = 255 - static_cast<int>(Remap(z, z_min, z_max, 255));
+  ::mediapipe::Color color;
+  color.set_r(color_value);
+  color.set_g(color_value);
+  color.set_b(color_value);
+  SetColor(render_annotation, color);
+  const int thickness = static_cast<int>((1.f - Remap(z, z_min, z_max, 1)) *
+                                         kMaxLandmarkThickness);
+  render_annotation->set_thickness(thickness);
+}
+
+}  // namespace
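+
+// Worked example of the depth shading above (illustrative values): with
+// z_min = 0.2 and z_max = 1.0, a keypoint at depth z = 0.6 gives
+// Remap(0.6, 0.2, 1.0, 255) ~= 127.5, so color_value = 255 - 127 = 128
+// (mid-gray) and thickness = (1 - 0.5) * 18 = 9. Nearer keypoints therefore
+// render brighter and thicker; farther ones darker and thinner.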
+// A calculator that converts FrameAnnotation proto to RenderData proto for
+// visualization. The input should be the FrameAnnotation proto buffer. It is
+// also possible to specify the connections between landmarks.
+//
+// Example config:
+// node {
+//   calculator: "AnnotationsToRenderDataCalculator"
+//   input_stream: "ANNOTATIONS:annotations"
+//   output_stream: "RENDER_DATA:render_data"
+//   options {
+//     [AnnotationsToRenderDataCalculatorOptions.ext] {
+//       landmark_connections: [0, 1, 1, 2]
+//       landmark_color { r: 0 g: 255 b: 0 }
+//       connection_color { r: 0 g: 255 b: 0 }
+//       thickness: 4.0
+//     }
+//   }
+// }
+class AnnotationsToRenderDataCalculator : public CalculatorBase {
+ public:
+  AnnotationsToRenderDataCalculator() {}
+  ~AnnotationsToRenderDataCalculator() override {}
+  AnnotationsToRenderDataCalculator(const AnnotationsToRenderDataCalculator&) =
+      delete;
+  AnnotationsToRenderDataCalculator& operator=(
+      const AnnotationsToRenderDataCalculator&) = delete;
+
+  static absl::Status GetContract(CalculatorContract* cc);
+
+  absl::Status Open(CalculatorContext* cc) override;
+
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  static void SetRenderAnnotationColorThickness(
+      const AnnotationsToRenderDataCalculatorOptions& options,
+      RenderAnnotation* render_annotation);
+  static RenderAnnotation* AddPointRenderData(
+      const AnnotationsToRenderDataCalculatorOptions& options,
+      RenderData* render_data);
+
+  // Adds a command to draw a line in the rendering queue. The line is drawn
+  // from (start_x, start_y) to (end_x, end_y). The input x,y can either be in
+  // pixel or normalized coordinates [0, 1], as indicated by the normalized
+  // flag.
+  static void AddConnectionToRenderData(
+      float start_x, float start_y, float end_x, float end_y,
+      const AnnotationsToRenderDataCalculatorOptions& options, bool normalized,
+      RenderData* render_data);
+
+  // Same as the function above, except that instead of using color data to
+  // render the line, it re-colors the line according to the two depth values:
+  // gray_val1 is the color of the starting point and gray_val2 is the color
+  // of the ending point. The line is colored with a gradient from gray_val1
+  // to gray_val2. Each gray_val ranges from 0 (black) to 255 (white).
+  static void AddConnectionToRenderData(
+      float start_x, float start_y, float end_x, float end_y,
+      const AnnotationsToRenderDataCalculatorOptions& options, bool normalized,
+      int gray_val1, int gray_val2, RenderData* render_data);
+
+  AnnotationsToRenderDataCalculatorOptions options_;
+};
+REGISTER_CALCULATOR(AnnotationsToRenderDataCalculator);
+
+absl::Status AnnotationsToRenderDataCalculator::GetContract(
+    CalculatorContract* cc) {
+  RET_CHECK(cc->Inputs().HasTag(kAnnotationTag)) << "No input stream found.";
+  if (cc->Inputs().HasTag(kAnnotationTag)) {
+    cc->Inputs().Tag(kAnnotationTag).Set<FrameAnnotation>();
+  }
+  cc->Outputs().Tag(kRenderDataTag).Set<RenderData>();
+
+  return absl::OkStatus();
+}
+
+absl::Status AnnotationsToRenderDataCalculator::Open(CalculatorContext* cc) {
+  cc->SetOffset(TimestampDiff(0));
+  options_ = cc->Options<AnnotationsToRenderDataCalculatorOptions>();
+
+  return absl::OkStatus();
+}
+
+absl::Status AnnotationsToRenderDataCalculator::Process(CalculatorContext* cc) {
+  auto render_data = absl::make_unique<RenderData>();
+  bool visualize_depth = options_.visualize_landmark_depth();
+  float z_min = 0.f;
+  float z_max = 0.f;
+
+  if (cc->Inputs().HasTag(kAnnotationTag)) {
+    const auto& annotations =
+        cc->Inputs().Tag(kAnnotationTag).Get<FrameAnnotation>();
+    RET_CHECK_EQ(options_.landmark_connections_size() % 2, 0)
+        << "Number of entries in landmark connections must be a multiple of 2";
+
+    if (visualize_depth) {
+      GetMinMaxZ(annotations, &z_min, &z_max);
+      // Only change rendering if there are actually z values other than 0.
+      visualize_depth &= ((z_max - z_min) > 1e-3);
+    }
+
+    for (const auto& object : annotations.annotations()) {
+      for (const auto& keypoint : object.keypoints()) {
+        auto* keypoint_data_render =
+            AddPointRenderData(options_, render_data.get());
+        auto* point = keypoint_data_render->mutable_point();
+        if (visualize_depth) {
+          SetColorSizeValueFromZ(keypoint.point_2d().depth(), z_min, z_max,
+                                 keypoint_data_render);
+        }
+
+        point->set_normalized(true);
+        point->set_x(keypoint.point_2d().x());
+        point->set_y(keypoint.point_2d().y());
+      }
+
+      // Add edges.
+      for (int i = 0; i < options_.landmark_connections_size(); i += 2) {
+        const auto& ld0 =
+            object.keypoints(options_.landmark_connections(i)).point_2d();
+        const auto& ld1 =
+            object.keypoints(options_.landmark_connections(i + 1)).point_2d();
+        const bool normalized = true;
+
+        if (visualize_depth) {
+          const int gray_val1 =
+              255 - static_cast<int>(Remap(ld0.depth(), z_min, z_max, 255));
+          const int gray_val2 =
+              255 - static_cast<int>(Remap(ld1.depth(), z_min, z_max, 255));
+          AddConnectionToRenderData(ld0.x(), ld0.y(), ld1.x(), ld1.y(),
+                                    options_, normalized, gray_val1, gray_val2,
+                                    render_data.get());
+        } else {
+          AddConnectionToRenderData(ld0.x(), ld0.y(), ld1.x(), ld1.y(),
+                                    options_, normalized, render_data.get());
+        }
+      }
+    }
+  }
+
+  cc->Outputs()
+      .Tag(kRenderDataTag)
+      .Add(render_data.release(), cc->InputTimestamp());
+
+  return absl::OkStatus();
+}
+
+void AnnotationsToRenderDataCalculator::AddConnectionToRenderData(
+    float start_x, float start_y, float end_x, float end_y,
+    const AnnotationsToRenderDataCalculatorOptions& options, bool normalized,
+    int gray_val1, int gray_val2, RenderData* render_data) {
+  auto* connection_annotation = render_data->add_render_annotations();
+  RenderAnnotation::GradientLine* line =
+      connection_annotation->mutable_gradient_line();
+  line->set_x_start(start_x);
+  line->set_y_start(start_y);
+  line->set_x_end(end_x);
+  line->set_y_end(end_y);
+  line->set_normalized(normalized);
+  line->mutable_color1()->set_r(gray_val1);
+  line->mutable_color1()->set_g(gray_val1);
+  line->mutable_color1()->set_b(gray_val1);
+  line->mutable_color2()->set_r(gray_val2);
+  line->mutable_color2()->set_g(gray_val2);
+  line->mutable_color2()->set_b(gray_val2);
+  connection_annotation->set_thickness(options.thickness());
+}
+
+void AnnotationsToRenderDataCalculator::AddConnectionToRenderData(
+    float start_x, float start_y, float end_x, float end_y,
+    const AnnotationsToRenderDataCalculatorOptions& options, bool normalized,
+    RenderData* render_data) {
+  auto* connection_annotation = render_data->add_render_annotations();
+  RenderAnnotation::Line* line = connection_annotation->mutable_line();
+  line->set_x_start(start_x);
+  line->set_y_start(start_y);
+  line->set_x_end(end_x);
+  line->set_y_end(end_y);
+  line->set_normalized(normalized);
+  SetColor(connection_annotation, options.connection_color());
+  connection_annotation->set_thickness(options.thickness());
+}
+
+RenderAnnotation* AnnotationsToRenderDataCalculator::AddPointRenderData(
+    const AnnotationsToRenderDataCalculatorOptions& options,
+    RenderData* render_data) {
+  auto* landmark_data_annotation = render_data->add_render_annotations();
+  landmark_data_annotation->set_scene_tag(kKeypointLabel);
+  SetRenderAnnotationColorThickness(options, landmark_data_annotation);
+  return landmark_data_annotation;
+}
+
+void AnnotationsToRenderDataCalculator::SetRenderAnnotationColorThickness(
+    const AnnotationsToRenderDataCalculatorOptions& options,
+    RenderAnnotation*
render_annotation) { + SetColor(render_annotation, options.landmark_color()); + render_annotation->set_thickness(options.thickness()); +} + +} // namespace mediapipe diff --git a/mediapipe/graphs/object_detection_3d/calculators/annotations_to_render_data_calculator.proto b/mediapipe/graphs/object_detection_3d/calculators/annotations_to_render_data_calculator.proto new file mode 100644 index 0000000..1e04d95 --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/calculators/annotations_to_render_data_calculator.proto @@ -0,0 +1,43 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +import "mediapipe/framework/calculator.proto"; +import "mediapipe/util/color.proto"; + +message AnnotationsToRenderDataCalculatorOptions { + extend CalculatorOptions { + optional AnnotationsToRenderDataCalculatorOptions ext = 267644238; + } + + // Specifies the landmarks to be connected in the drawing. For example, the + // landmark_connections value of [0, 1, 1, 2] specifies two connections: one + // that connects landmarks with index 0 and 1, and another that connects + // landmarks with index 1 and 2. + repeated int32 landmark_connections = 1; + + // Color of the landmarks. + optional Color landmark_color = 2; + // Color of the connections. + optional Color connection_color = 3; + + // Thickness of the drawing of landmarks and connections. + optional double thickness = 4 [default = 1.0]; + + // Change color and size of rendered landmarks based on its z value. + optional bool visualize_landmark_depth = 5 [default = true]; +} diff --git a/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc new file mode 100644 index 0000000..9bc43ba --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc @@ -0,0 +1,947 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+#if defined(__ANDROID__)
+#include "mediapipe/util/android/asset_manager_util.h"
+#else
+#include <fstream>
+#include <iostream>
+#endif
+
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/gpu/gl_calculator_helper.h"
+#include "mediapipe/gpu/shader_util.h"
+#include "mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.pb.h"
+#include "mediapipe/graphs/object_detection_3d/calculators/model_matrix.pb.h"
+#include "mediapipe/modules/objectron/calculators/camera_parameters.pb.h"
+
+namespace mediapipe {
+
+namespace {
+
+#if defined(GL_DEBUG)
+#define GLCHECK(command) \
+  command;               \
+  if (int err = glGetError()) LOG(ERROR) << "GL error detected: " << err;
+#else
+#define GLCHECK(command) command
+#endif
+
+// For ease of use, we prefer ImageFrame on Android and GpuBuffer otherwise.
+#if defined(__ANDROID__)
+typedef ImageFrame AssetTextureFormat;
+#else
+typedef GpuBuffer AssetTextureFormat;
+#endif
+
+enum { ATTRIB_VERTEX, ATTRIB_TEXTURE_POSITION, ATTRIB_NORMAL, NUM_ATTRIBUTES };
+static const int kNumMatrixEntries = 16;
+
+// Hard-coded MVP matrix for testing.
+static const float kModelMatrix[] = {0.83704215,  -0.36174262, 0.41049102, 0.0,
+                                     0.06146407,  0.8076706,   0.5864218,  0.0,
+                                     -0.54367524, -0.4656292,  0.69828844, 0.0,
+                                     0.0,         0.0,         -98.64117,  1.0};
+
+// Loads a texture from an input side packet, streams in an animation file
+// from a filename given in another input side packet, and renders the
+// animation over the screen according to the input timestamp and desired
+// animation FPS.
+//
+// Inputs:
+//  VIDEO (GpuBuffer, optional):
+//    If provided, the input buffer will be assumed to be unique, and will be
+//    consumed by this calculator and rendered to directly. The output video
+//    buffer will then be the released reference to the input video buffer.
+//  MODEL_MATRICES (TimedModelMatrixProtoList, optional):
+//    If provided, will set the model matrices for the objects to be rendered
+//    during future rendering calls.
+//  TEXTURE (ImageFrame on Android / GpuBuffer on iOS, semi-optional):
+//    Texture to use with the animation file. A texture is REQUIRED to be
+//    passed into the calculator, but it can be passed in as a side packet OR
+//    an input stream.
+//
+// Input side packets:
+//  TEXTURE (ImageFrame on Android / GpuBuffer on iOS, semi-optional):
+//    Texture to use with the animation file. A texture is REQUIRED to be
+//    passed into the calculator, but it can be passed in as a side packet OR
+//    an input stream.
+//  ANIMATION_ASSET (String, required):
+//    Path of the animation file to load and render. The file format expects
+//    an arbitrary number of animation frames, concatenated directly together,
+//    with each animation frame looking like:
+//      HEADER
+//      VERTICES
+//      TEXTURE_COORDS
+//      INDICES
+//    The header consists of 3 int32 lengths, the sizes of the vertex data,
+//    the texcoord data, and the index data, respectively. Let us call those
+//    N1, N2, and N3. Then we expect N1 float32's for vertex information
+//    (x1,y1,z1,x2,y2,z2,etc.), followed by N2 float32's for texcoord
+//    information (u1,v1,u2,v2,u3,v3,etc.), followed by N3 shorts/int16's
+//    for triangle indices (a1,b1,c1,a2,b2,c2,etc.).
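+//    As a worked example of this layout (illustrative, not from the original
+//    header): a frame holding a single textured triangle would use N1 = 9,
+//    N2 = 6, and N3 = 3, and would occupy 3*4 (header) + 9*4 (vertices) +
+//    6*4 (texcoords) + 3*2 (indices) = 78 bytes of the stream.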
+//  CAMERA_PARAMETERS_PROTO_STRING (String, optional):
+//    Serialized proto std::string of CameraParametersProto. We need this to
+//    get the right aspect ratio and field of view.
+// Options:
+//  aspect_ratio: the ratio between the rendered image width and height.
+//    It will be ignored if the CAMERA_PARAMETERS_PROTO_STRING input side
+//    packet is provided.
+//  vertical_fov_degrees: vertical field of view in degrees.
+//    It will be ignored if the CAMERA_PARAMETERS_PROTO_STRING input side
+//    packet is provided.
+//  z_clipping_plane_near: near plane value for z-clipping.
+//  z_clipping_plane_far: far plane value for z-clipping.
+//  animation_speed_fps: speed at which to cycle through animation frames (in
+//    frames per second).
+//
+// Outputs:
+//  OUTPUT, or index 0 (GpuBuffer):
+//    Frames filled with the given texture.
+
+// Simple helper-struct for containing the parsed geometry data from a 3D
+// animation frame for rendering.
+struct TriangleMesh {
+  int index_count = 0;  // Needed for the glDrawElements rendering call.
+  std::unique_ptr<float[]> normals = nullptr;
+  std::unique_ptr<float[]> vertices = nullptr;
+  std::unique_ptr<float[]> texture_coords = nullptr;
+  std::unique_ptr<int16[]> triangle_indices = nullptr;
+};
+
+typedef std::unique_ptr<float[]> ModelMatrix;
+
+}  // namespace
+
+class GlAnimationOverlayCalculator : public CalculatorBase {
+ public:
+  GlAnimationOverlayCalculator() {}
+  ~GlAnimationOverlayCalculator();
+
+  static absl::Status GetContract(CalculatorContract *cc);
+
+  absl::Status Open(CalculatorContext *cc) override;
+  absl::Status Process(CalculatorContext *cc) override;
+
+ private:
+  bool has_video_stream_ = false;
+  bool has_model_matrix_stream_ = false;
+  bool has_mask_model_matrix_stream_ = false;
+  bool has_occlusion_mask_ = false;
+
+  GlCalculatorHelper helper_;
+  bool initialized_ = false;
+  GlTexture texture_;
+  GlTexture mask_texture_;
+
+  GLuint renderbuffer_ = 0;
+  bool depth_buffer_created_ = false;
+
+  GLuint program_ = 0;
+  GLint texture_uniform_ = -1;
+  GLint perspective_matrix_uniform_ = -1;
+  GLint model_matrix_uniform_ = -1;
+
+  std::vector<TriangleMesh> triangle_meshes_;
+  std::vector<TriangleMesh> mask_meshes_;
+  Timestamp animation_start_time_;
+  int frame_count_ = 0;
+  float animation_speed_fps_;
+
+  std::vector<ModelMatrix> current_model_matrices_;
+  std::vector<ModelMatrix> current_mask_model_matrices_;
+
+  // Perspective matrix for rendering, to be applied to all model matrices
+  // prior to passing through to the shader as an MVP matrix. Initialized
+  // during the first image packet read.
+  float perspective_matrix_[kNumMatrixEntries];
+
+  void ComputeAspectRatioAndFovFromCameraParameters(
+      const CameraParametersProto &camera_parameters, float *aspect_ratio,
+      float *vertical_fov_degrees);
+
+  int GetAnimationFrameIndex(Timestamp timestamp);
+  absl::Status GlSetup();
+  absl::Status GlBind(const TriangleMesh &triangle_mesh,
+                      const GlTexture &texture);
+  absl::Status GlRender(const TriangleMesh &triangle_mesh,
+                        const float *model_matrix);
+  void InitializePerspectiveMatrix(float aspect_ratio,
+                                   float vertical_fov_degrees, float z_near,
+                                   float z_far);
+  void LoadModelMatrices(const TimedModelMatrixProtoList &model_matrices,
+                         std::vector<ModelMatrix> *current_model_matrices);
+  void CalculateTriangleMeshNormals(int normals_len,
+                                    TriangleMesh *triangle_mesh);
+  void Normalize3f(float input[3]);
+
+#if !defined(__ANDROID__)
+  // Asset loading routine for all non-Android platforms.
+  bool LoadAnimation(const std::string &filename);
+#else
+  // Asset loading for all Android platforms.
+  bool LoadAnimationAndroid(const std::string &filename,
+                            std::vector<TriangleMesh> *meshes);
+  bool ReadBytesFromAsset(AAsset *asset, void *buffer, int num_bytes_to_read);
+#endif
+};
+REGISTER_CALCULATOR(GlAnimationOverlayCalculator);
+
+// static
+absl::Status GlAnimationOverlayCalculator::GetContract(
+    CalculatorContract *cc) {
+  MP_RETURN_IF_ERROR(
+      GlCalculatorHelper::SetupInputSidePackets(&(cc->InputSidePackets())));
+  if (cc->Inputs().HasTag("VIDEO")) {
+    // Currently used only for size and timestamp.
+    cc->Inputs().Tag("VIDEO").Set<GpuBuffer>();
+  }
+  TagOrIndex(&(cc->Outputs()), "OUTPUT", 0).Set<GpuBuffer>();
+
+  if (cc->Inputs().HasTag("MODEL_MATRICES")) {
+    cc->Inputs().Tag("MODEL_MATRICES").Set<TimedModelMatrixProtoList>();
+  }
+  if (cc->Inputs().HasTag("MASK_MODEL_MATRICES")) {
+    cc->Inputs().Tag("MASK_MODEL_MATRICES").Set<TimedModelMatrixProtoList>();
+  }
+
+  // Must have the texture as an input stream or a side packet.
+  if (cc->InputSidePackets().HasTag("TEXTURE")) {
+    cc->InputSidePackets().Tag("TEXTURE").Set<AssetTextureFormat>();
+  } else {
+    cc->Inputs().Tag("TEXTURE").Set<AssetTextureFormat>();
+  }
+
+  cc->InputSidePackets().Tag("ANIMATION_ASSET").Set<std::string>();
+  if (cc->InputSidePackets().HasTag("CAMERA_PARAMETERS_PROTO_STRING")) {
+    cc->InputSidePackets()
+        .Tag("CAMERA_PARAMETERS_PROTO_STRING")
+        .Set<std::string>();
+  }
+
+  if (cc->InputSidePackets().HasTag("MASK_TEXTURE")) {
+    cc->InputSidePackets().Tag("MASK_TEXTURE").Set<AssetTextureFormat>();
+  }
+  if (cc->InputSidePackets().HasTag("MASK_ASSET")) {
+    cc->InputSidePackets().Tag("MASK_ASSET").Set<std::string>();
+  }
+
+  return absl::OkStatus();
+}
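+
+// For reference, a minimal node config wiring up this calculator (a sketch
+// based on the contract above; the stream and side-packet names are
+// illustrative, not taken from an existing graph):
+//
+//   node {
+//     calculator: "GlAnimationOverlayCalculator"
+//     input_stream: "VIDEO:input_video"
+//     input_stream: "MODEL_MATRICES:model_matrices"
+//     output_stream: "output_video"
+//     input_side_packet: "TEXTURE:box_texture"
+//     input_side_packet: "ANIMATION_ASSET:box_asset_name"
+//   }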
+
+void GlAnimationOverlayCalculator::CalculateTriangleMeshNormals(
+    int normals_len, TriangleMesh *triangle_mesh) {
+  // Set triangle_mesh normals for shader usage.
+  triangle_mesh->normals.reset(new float[normals_len]);
+  // Used for storing the vertex normals prior to averaging.
+  std::vector<float> vertex_normals_sum(normals_len, 0.0f);
+  // Compute every triangle surface normal and store them for averaging.
+  for (int idx = 0; idx < triangle_mesh->index_count; idx += 3) {
+    int v_idx[3];
+    v_idx[0] = triangle_mesh->triangle_indices.get()[idx];
+    v_idx[1] = triangle_mesh->triangle_indices.get()[idx + 1];
+    v_idx[2] = triangle_mesh->triangle_indices.get()[idx + 2];
+    // (V1) vertex X,Y,Z indices in triangle_mesh.vertices.
+    const float v1x = triangle_mesh->vertices[v_idx[0] * 3];
+    const float v1y = triangle_mesh->vertices[v_idx[0] * 3 + 1];
+    const float v1z = triangle_mesh->vertices[v_idx[0] * 3 + 2];
+    // (V2) vertex X,Y,Z indices in triangle_mesh.vertices.
+    const float v2x = triangle_mesh->vertices[v_idx[1] * 3];
+    const float v2y = triangle_mesh->vertices[v_idx[1] * 3 + 1];
+    const float v2z = triangle_mesh->vertices[v_idx[1] * 3 + 2];
+    // (V3) vertex X,Y,Z indices in triangle_mesh.vertices.
+    const float v3x = triangle_mesh->vertices[v_idx[2] * 3];
+    const float v3y = triangle_mesh->vertices[v_idx[2] * 3 + 1];
+    const float v3z = triangle_mesh->vertices[v_idx[2] * 3 + 2];
+    // Calculate the surface normal from the vertices:
+    // A = V2 - V1
+    const float ax = v2x - v1x;
+    const float ay = v2y - v1y;
+    const float az = v2z - v1z;
+    // B = V3 - V1
+    const float bx = v3x - v1x;
+    const float by = v3y - v1y;
+    const float bz = v3z - v1z;
+    // Normal = A x B (cross product).
+    const float normal_x = ay * bz - az * by;
+    const float normal_y = az * bx - ax * bz;
+    const float normal_z = ax * by - ay * bx;
+    // The normals calculated above must be normalized if we wish to prevent
+    // triangles with a larger surface area from dominating the normal
+    // calculations; however, none of our current models require this
+    // normalization.
+
+    // Add the connected normal to each associated vertex.
+    // It is also necessary to increment each vertex denominator for averaging.
+    for (int i = 0; i < 3; i++) {
+      vertex_normals_sum[v_idx[i] * 3] += normal_x;
+      vertex_normals_sum[v_idx[i] * 3 + 1] += normal_y;
+      vertex_normals_sum[v_idx[i] * 3 + 2] += normal_z;
+    }
+  }
+
+  // Combine all triangle normals connected to each vertex by adding the X,Y,Z
+  // value of each adjacent triangle surface normal to every vertex and then
+  // averaging the combined value.
+  for (int idx = 0; idx < normals_len; idx += 3) {
+    float normal[3];
+    normal[0] = vertex_normals_sum[idx];
+    normal[1] = vertex_normals_sum[idx + 1];
+    normal[2] = vertex_normals_sum[idx + 2];
+    Normalize3f(normal);
+    triangle_mesh->normals.get()[idx] = normal[0];
+    triangle_mesh->normals.get()[idx + 1] = normal[1];
+    triangle_mesh->normals.get()[idx + 2] = normal[2];
+  }
+}
+
+void GlAnimationOverlayCalculator::Normalize3f(float input[3]) {
+  float product = 0.0;
+  product += input[0] * input[0];
+  product += input[1] * input[1];
+  product += input[2] * input[2];
+  float magnitude = sqrt(product);
+  input[0] /= magnitude;
+  input[1] /= magnitude;
+  input[2] /= magnitude;
+}
+
+// Helper function for initializing our perspective matrix.
+void GlAnimationOverlayCalculator::InitializePerspectiveMatrix(
+    float aspect_ratio, float fov_degrees, float z_near, float z_far) {
+  // Standard perspective projection matrix calculations.
+  const float f = 1.0f / std::tan(fov_degrees * M_PI / 360.0f);
+  for (int i = 0; i < kNumMatrixEntries; i++) {
+    perspective_matrix_[i] = 0;
+  }
+  const float denom = 1.0f / (z_near - z_far);
+  perspective_matrix_[0] = f / aspect_ratio;
+  perspective_matrix_[5] = f;
+  perspective_matrix_[10] = (z_near + z_far) * denom;
+  perspective_matrix_[11] = -1.0f;
+  perspective_matrix_[14] = 2.0f * z_far * z_near * denom;
+}
+
+#if defined(__ANDROID__)
+// Helper function for reading in a specified number of bytes from an Android
+// asset. Returns true if it successfully reads all bytes into the buffer.
+bool GlAnimationOverlayCalculator::ReadBytesFromAsset(AAsset *asset,
+                                                      void *buffer,
+                                                      int num_bytes_to_read) {
+  // Most file systems use block sizes of 4KB or 8KB; ideally we'd choose a
+  // small multiple of the block size for best input streaming performance, so
+  // we go for a reasonably safe buffer size of 8KB = 8*1024 bytes.
+  static const int kMaxChunkSize = 8192;
+
+  int bytes_left = num_bytes_to_read;
+  int bytes_read = 1;  // Any value > 0 here, just to start looping.
+
+  // Treat as a uint8_t array so we can deal in single-byte arithmetic easily.
+  uint8_t *currBufferIndex = reinterpret_cast<uint8_t *>(buffer);
+  while (bytes_read > 0 && bytes_left > 0) {
+    bytes_read = AAsset_read(asset, (void *)currBufferIndex,
+                             std::min(bytes_left, kMaxChunkSize));
+    bytes_left -= bytes_read;
+    currBufferIndex += bytes_read;
+  }
+  // At least log any I/O errors encountered.
+  if (bytes_read < 0) {
+    LOG(ERROR) << "Error reading from AAsset: " << bytes_read;
+    return false;
+  }
+  if (bytes_left > 0) {
+    // Reached EOF before reading in the specified number of bytes.
+    LOG(WARNING) << "Reached EOF before reading in specified number of bytes.";
+    return false;
+  }
+  return true;
+}
+
+// The below asset streaming code is Android-only, making use of the platform
+// JNI helper classes AAssetManager and AAsset.
+bool GlAnimationOverlayCalculator::LoadAnimationAndroid(
+    const std::string &filename, std::vector<TriangleMesh> *meshes) {
+  mediapipe::AssetManager *mediapipe_asset_manager =
+      Singleton<mediapipe::AssetManager>::get();
+  AAssetManager *asset_manager = mediapipe_asset_manager->GetAssetManager();
+  if (!asset_manager) {
+    LOG(ERROR) << "Failed to access Android asset manager.";
+    return false;
+  }
+
+  // First we open the asset for streaming.
+  AAsset *asset = AAssetManager_open(asset_manager, filename.c_str(),
+                                     AASSET_MODE_STREAMING);
+  if (!asset) {
+    LOG(ERROR) << "Failed to open animation asset: " << filename;
+    return false;
+  }
+
+  // And now, while we are able to stream in more frames, we do so.
+  frame_count_ = 0;
+  int32 lengths[3];
+  while (ReadBytesFromAsset(asset, (void *)lengths, sizeof(lengths[0]) * 3)) {
+    // About to start reading the next animation frame. Stream it in here.
+    // Each frame stores first the object counts of its three arrays
+    // (vertices, texture coordinates, triangle indices, respectively), and
+    // then stores each of those arrays as a byte dump, in order.
+    meshes->emplace_back();
+    TriangleMesh &triangle_mesh = meshes->back();
+    // Try to read in vertices (4-byte floats).
+    triangle_mesh.vertices.reset(new float[lengths[0]]);
+    if (!ReadBytesFromAsset(asset, (void *)triangle_mesh.vertices.get(),
+                            sizeof(float) * lengths[0])) {
+      LOG(ERROR) << "Failed to read vertices for frame " << frame_count_;
+      return false;
+    }
+    // Try to read in texture coordinates (4-byte floats).
+    triangle_mesh.texture_coords.reset(new float[lengths[1]]);
+    if (!ReadBytesFromAsset(asset, (void *)triangle_mesh.texture_coords.get(),
+                            sizeof(float) * lengths[1])) {
+      LOG(ERROR) << "Failed to read tex-coords for frame " << frame_count_;
+      return false;
+    }
+    // Try to read in indices (2-byte shorts).
+    triangle_mesh.index_count = lengths[2];
+    triangle_mesh.triangle_indices.reset(new int16[lengths[2]]);
+    if (!ReadBytesFromAsset(asset,
+                            (void *)triangle_mesh.triangle_indices.get(),
+                            sizeof(int16) * lengths[2])) {
+      LOG(ERROR) << "Failed to read indices for frame " << frame_count_;
+      return false;
+    }
+
+    // Set the normals for this triangle_mesh.
+    CalculateTriangleMeshNormals(lengths[0], &triangle_mesh);
+
+    frame_count_++;
+  }
+  AAsset_close(asset);
+
+  LOG(INFO) << "Finished parsing " << frame_count_ << " animation frames.";
+  if (meshes->empty()) {
+    LOG(ERROR) << "No animation frames were parsed! Erroring out calculator.";
+    return false;
+  }
+  return true;
+}
+
+#else  // defined(__ANDROID__)
+
+bool GlAnimationOverlayCalculator::LoadAnimation(const std::string &filename) {
+  std::ifstream infile(filename.c_str(), std::ifstream::binary);
+  if (!infile) {
+    LOG(ERROR) << "Error opening asset with filename: " << filename;
+    return false;
+  }
+
+  frame_count_ = 0;
+  int32 lengths[3];
+  while (true) {
+    // See if we have more initial size counts to read in.
+    infile.read((char *)(lengths), sizeof(lengths[0]) * 3);
+    if (!infile) {
+      // No more frames to read. Close out.
+      infile.close();
+      break;
+    }
+
+    triangle_meshes_.emplace_back();
+    TriangleMesh &triangle_mesh = triangle_meshes_.back();
+
+    // Try to read in vertices (4-byte floats).
+    triangle_mesh.vertices.reset(new float[lengths[0]]);
+    infile.read((char *)(triangle_mesh.vertices.get()),
+                sizeof(float) * lengths[0]);
+    if (!infile) {
+      LOG(ERROR) << "Failed to read vertices for frame " << frame_count_;
+      return false;
+    }
+
+    // Try to read in texture coordinates (4-byte floats).
+    triangle_mesh.texture_coords.reset(new float[lengths[1]]);
+    infile.read((char *)(triangle_mesh.texture_coords.get()),
+                sizeof(float) * lengths[1]);
+    if (!infile) {
+      LOG(ERROR) << "Failed to read texture coordinates for frame "
+                 << frame_count_;
+      return false;
+    }
+
+    // Try to read in the triangle indices (2-byte shorts).
+    triangle_mesh.index_count = lengths[2];
+    triangle_mesh.triangle_indices.reset(new int16[lengths[2]]);
+    infile.read((char *)(triangle_mesh.triangle_indices.get()),
+                sizeof(int16) * lengths[2]);
+    if (!infile) {
+      LOG(ERROR) << "Failed to read triangle indices for frame "
+                 << frame_count_;
+      return false;
+    }
+
+    // Set the normals for this triangle_mesh.
+    CalculateTriangleMeshNormals(lengths[0], &triangle_mesh);
+
+    frame_count_++;
+  }
+
+  LOG(INFO) << "Finished parsing " << frame_count_ << " animation frames.";
+  if (triangle_meshes_.empty()) {
+    LOG(ERROR) << "No animation frames were parsed! Erroring out calculator.";
+    return false;
+  }
+  return true;
+}
+
+#endif
+
+void GlAnimationOverlayCalculator::ComputeAspectRatioAndFovFromCameraParameters(
+    const CameraParametersProto &camera_parameters, float *aspect_ratio,
+    float *vertical_fov_degrees) {
+  CHECK(aspect_ratio != nullptr);
+  CHECK(vertical_fov_degrees != nullptr);
+  *aspect_ratio =
+      camera_parameters.portrait_width() / camera_parameters.portrait_height();
+  *vertical_fov_degrees =
+      std::atan(camera_parameters.portrait_height() * 0.5f) * 2 * 180 / M_PI;
+}
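+
+// Worked example (illustrative values, assuming the portrait dimensions are
+// expressed relative to the focal length, so that half the height equals
+// tan(fov / 2)): portrait_height = 1.5 yields a vertical FOV of
+// 2 * atan(0.75) * 180 / pi, roughly 73.7 degrees.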
+absl::Status GlAnimationOverlayCalculator::Open(CalculatorContext *cc) {
+  cc->SetOffset(TimestampDiff(0));
+  MP_RETURN_IF_ERROR(helper_.Open(cc));
+
+  const auto &options = cc->Options<GlAnimationOverlayCalculatorOptions>();
+
+  animation_speed_fps_ = options.animation_speed_fps();
+
+  // Construct the projection matrix using input side packets or options.
+  float aspect_ratio;
+  float vertical_fov_degrees;
+  if (cc->InputSidePackets().HasTag("CAMERA_PARAMETERS_PROTO_STRING")) {
+    const std::string &camera_parameters_proto_string =
+        cc->InputSidePackets()
+            .Tag("CAMERA_PARAMETERS_PROTO_STRING")
+            .Get<std::string>();
+    CameraParametersProto camera_parameters_proto;
+    camera_parameters_proto.ParseFromString(camera_parameters_proto_string);
+    ComputeAspectRatioAndFovFromCameraParameters(
+        camera_parameters_proto, &aspect_ratio, &vertical_fov_degrees);
+  } else {
+    aspect_ratio = options.aspect_ratio();
+    vertical_fov_degrees = options.vertical_fov_degrees();
+  }
+
+  InitializePerspectiveMatrix(aspect_ratio, vertical_fov_degrees,
+                              options.z_clipping_plane_near(),
+                              options.z_clipping_plane_far());
+
+  // See what streams we have.
+  has_video_stream_ = cc->Inputs().HasTag("VIDEO");
+  has_model_matrix_stream_ = cc->Inputs().HasTag("MODEL_MATRICES");
+  has_mask_model_matrix_stream_ = cc->Inputs().HasTag("MASK_MODEL_MATRICES");
+
+  // Try to load in the animation asset in a platform-specific manner.
+  const std::string &asset_name =
+      cc->InputSidePackets().Tag("ANIMATION_ASSET").Get<std::string>();
+  bool loaded_animation = false;
+#if defined(__ANDROID__)
+  if (cc->InputSidePackets().HasTag("MASK_ASSET")) {
+    has_occlusion_mask_ = true;
+    const std::string &mask_asset_name =
+        cc->InputSidePackets().Tag("MASK_ASSET").Get<std::string>();
+    loaded_animation = LoadAnimationAndroid(mask_asset_name, &mask_meshes_);
+    if (!loaded_animation) {
+      LOG(ERROR) << "Failed to load mask asset.";
+      return absl::UnknownError("Failed to load mask asset.");
+    }
+  }
+  loaded_animation = LoadAnimationAndroid(asset_name, &triangle_meshes_);
+#else
+  loaded_animation = LoadAnimation(asset_name);
+#endif
+  if (!loaded_animation) {
+    LOG(ERROR) << "Failed to load animation asset.";
+    return absl::UnknownError("Failed to load animation asset.");
+  }
+
+  return helper_.RunInGlContext([this, &cc]() -> absl::Status {
+    if (cc->InputSidePackets().HasTag("MASK_TEXTURE")) {
+      const auto &mask_texture =
+          cc->InputSidePackets().Tag("MASK_TEXTURE").Get<AssetTextureFormat>();
+      mask_texture_ = helper_.CreateSourceTexture(mask_texture);
+    }
+
+    // Load in all static texture data if it exists.
+    if (cc->InputSidePackets().HasTag("TEXTURE")) {
+      const auto &input_texture =
+          cc->InputSidePackets().Tag("TEXTURE").Get<AssetTextureFormat>();
+      texture_ = helper_.CreateSourceTexture(input_texture);
+    }
+
+    VLOG(2) << "Input texture size: " << texture_.width() << ", "
+            << texture_.height() << std::endl;
+
+    return absl::OkStatus();
+  });
+}
+
+int GlAnimationOverlayCalculator::GetAnimationFrameIndex(Timestamp timestamp) {
+  double seconds_delta = timestamp.Seconds() - animation_start_time_.Seconds();
+  int64_t frame_index =
+      static_cast<int64_t>(seconds_delta * animation_speed_fps_);
+  frame_index %= frame_count_;
+  return static_cast<int>(frame_index);
+}
+
+void GlAnimationOverlayCalculator::LoadModelMatrices(
+    const TimedModelMatrixProtoList &model_matrices,
+    std::vector<ModelMatrix> *current_model_matrices) {
+  current_model_matrices->clear();
+  for (int i = 0; i < model_matrices.model_matrix_size(); ++i) {
+    const auto &model_matrix = model_matrices.model_matrix(i);
+    CHECK(model_matrix.matrix_entries_size() == kNumMatrixEntries)
+        << "Invalid Model Matrix";
+    current_model_matrices->emplace_back();
+    ModelMatrix &new_matrix = current_model_matrices->back();
+    new_matrix.reset(new float[kNumMatrixEntries]);
+    for (int j = 0; j < kNumMatrixEntries; j++) {
+      // Model matrices are streamed in using ROW-major format, but we want
+      // COLUMN-major for rendering, so we transpose here.
+      int col = j % 4;
+      int row = j / 4;
+      new_matrix[row + col * 4] = model_matrix.matrix_entries(j);
+    }
+  }
+}
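+
+// Worked example of the transpose above: entry j = 7 lands at row 1, col 3
+// (the y-translation slot of a row-major matrix) and is stored at
+// new_matrix[1 + 3 * 4] = new_matrix[13], which is where a column-major
+// OpenGL-style matrix keeps its y translation.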
+absl::Status GlAnimationOverlayCalculator::Process(CalculatorContext *cc) {
+  return helper_.RunInGlContext([this, &cc]() -> absl::Status {
+    if (!initialized_) {
+      MP_RETURN_IF_ERROR(GlSetup());
+      initialized_ = true;
+      animation_start_time_ = cc->InputTimestamp();
+    }
+
+    // Process model matrices, if any are being streamed in, and update our
+    // list.
+    current_model_matrices_.clear();
+    if (has_model_matrix_stream_ &&
+        !cc->Inputs().Tag("MODEL_MATRICES").IsEmpty()) {
+      const TimedModelMatrixProtoList &model_matrices =
+          cc->Inputs().Tag("MODEL_MATRICES").Get<TimedModelMatrixProtoList>();
+      LoadModelMatrices(model_matrices, &current_model_matrices_);
+    }
+
+    current_mask_model_matrices_.clear();
+    if (has_mask_model_matrix_stream_ &&
+        !cc->Inputs().Tag("MASK_MODEL_MATRICES").IsEmpty()) {
+      const TimedModelMatrixProtoList &model_matrices =
+          cc->Inputs()
+              .Tag("MASK_MODEL_MATRICES")
+              .Get<TimedModelMatrixProtoList>();
+      LoadModelMatrices(model_matrices, &current_mask_model_matrices_);
+    }
+
+    // Arbitrary default width and height for the output destination texture,
+    // in the event that we don't have a valid and unique input buffer to
+    // overlay.
+    int width = 640;
+    int height = 480;
+
+    GlTexture dst;
+    std::unique_ptr<GpuBuffer> input_frame(nullptr);
+    if (has_video_stream_ && !(cc->Inputs().Tag("VIDEO").IsEmpty())) {
+      auto result = cc->Inputs().Tag("VIDEO").Value().Consume<GpuBuffer>();
+      if (result.ok()) {
+        input_frame = std::move(result).value();
+#if !MEDIAPIPE_GPU_BUFFER_USE_CV_PIXEL_BUFFER
+        input_frame->GetGlTextureBufferSharedPtr()->Reuse();
+#endif
+        width = input_frame->width();
+        height = input_frame->height();
+        dst = helper_.CreateSourceTexture(*input_frame);
+      } else {
+        LOG(ERROR) << "Unable to consume input video frame for overlay!";
+        LOG(ERROR) << "Status returned was: " << result.status();
+        dst = helper_.CreateDestinationTexture(width, height);
+      }
+    } else if (!has_video_stream_) {
+      dst = helper_.CreateDestinationTexture(width, height);
+    } else {
+      // We have an input video stream, but not for this frame. Don't render!
+      return absl::OkStatus();
+    }
+    helper_.BindFramebuffer(dst);
+
+    if (!depth_buffer_created_) {
+      // Create our private depth buffer.
+      GLCHECK(glGenRenderbuffers(1, &renderbuffer_));
+      GLCHECK(glBindRenderbuffer(GL_RENDERBUFFER, renderbuffer_));
+      GLCHECK(glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16,
+                                    width, height));
+      GLCHECK(glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
+                                        GL_RENDERBUFFER, renderbuffer_));
+      GLCHECK(glBindRenderbuffer(GL_RENDERBUFFER, 0));
+      depth_buffer_created_ = true;
+    }
+
+    // Re-bind our depth renderbuffer to our FBO depth attachment here.
+    GLCHECK(glBindRenderbuffer(GL_RENDERBUFFER, renderbuffer_));
+    GLCHECK(glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
+                                      GL_RENDERBUFFER, renderbuffer_));
+    GLenum status = GLCHECK(glCheckFramebufferStatus(GL_FRAMEBUFFER));
+    if (status != GL_FRAMEBUFFER_COMPLETE) {
+      LOG(ERROR) << "Incomplete framebuffer with status: " << status;
+    }
+    GLCHECK(glClear(GL_DEPTH_BUFFER_BIT));
+
+    if (has_occlusion_mask_) {
+      // Depth-only pass: with color writes disabled, the mask mesh populates
+      // the depth buffer so that it occludes the animation drawn afterwards.
+      glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_FALSE);
+      const TriangleMesh &mask_frame = mask_meshes_.front();
+      MP_RETURN_IF_ERROR(GlBind(mask_frame, mask_texture_));
+      // Draw objects using our latest model matrix stream packet.
+      for (const ModelMatrix &model_matrix : current_mask_model_matrices_) {
+        MP_RETURN_IF_ERROR(GlRender(mask_frame, model_matrix.get()));
+      }
+    }
+
+    glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
+    int frame_index = GetAnimationFrameIndex(cc->InputTimestamp());
+    const TriangleMesh &current_frame = triangle_meshes_[frame_index];
+
+    // Load dynamic texture if it exists
+    if (cc->Inputs().HasTag("TEXTURE")) {
+      const auto &input_texture =
+          cc->Inputs().Tag("TEXTURE").Get<GpuBuffer>();
+      texture_ = helper_.CreateSourceTexture(input_texture);
+    }
+
+    MP_RETURN_IF_ERROR(GlBind(current_frame, texture_));
+    if (has_model_matrix_stream_) {
+      // Draw objects using our latest model matrix stream packet.
+      for (const ModelMatrix &model_matrix : current_model_matrices_) {
+        MP_RETURN_IF_ERROR(GlRender(current_frame, model_matrix.get()));
+      }
+    } else {
+      // Just draw one object to a static model matrix.
+      MP_RETURN_IF_ERROR(GlRender(current_frame, kModelMatrix));
+    }
+
+    // Disable vertex attributes
+    GLCHECK(glDisableVertexAttribArray(ATTRIB_VERTEX));
+    GLCHECK(glDisableVertexAttribArray(ATTRIB_TEXTURE_POSITION));
+    GLCHECK(glDisableVertexAttribArray(ATTRIB_NORMAL));
+
+    // Disable depth test
+    GLCHECK(glDisable(GL_DEPTH_TEST));
+
+    // Unbind texture
+    GLCHECK(glActiveTexture(GL_TEXTURE1));
+    GLCHECK(glBindTexture(texture_.target(), 0));
+
+    // Unbind depth buffer
+    GLCHECK(glBindRenderbuffer(GL_RENDERBUFFER, 0));
+
+    GLCHECK(glFlush());
+
+    auto output = dst.GetFrame();
+    dst.Release();
+    TagOrIndex(&(cc->Outputs()), "OUTPUT", 0)
+        .Add(output.release(), cc->InputTimestamp());
+    GLCHECK(glFrontFace(GL_CCW));
+    return absl::OkStatus();
+  });
+}
+
+absl::Status GlAnimationOverlayCalculator::GlSetup() {
+  // Load vertex and fragment shaders
+  const GLint attr_location[NUM_ATTRIBUTES] = {
+      ATTRIB_VERTEX,
+      ATTRIB_TEXTURE_POSITION,
+      ATTRIB_NORMAL,
+  };
+  const GLchar *attr_name[NUM_ATTRIBUTES] = {
+      "position",
+      "texture_coordinate",
+      "normal",
+  };
+
+  const GLchar *vert_src = R"(
+    // Perspective projection matrix for rendering / clipping
+    uniform mat4 perspectiveMatrix;
+
+    // Matrix defining the currently rendered object model
+    uniform mat4 modelMatrix;
+
+    // vertex position in threespace
+    attribute vec4 position;
+    attribute vec3 normal;
+
+    // texture coordinate for each vertex in normalized texture space (0..1)
+    attribute mediump vec4 texture_coordinate;
+
+    // texture coordinate for fragment shader (will be interpolated)
+    varying mediump vec2 sampleCoordinate;
+    varying mediump vec3 vNormal;
+
+    void main() {
+      sampleCoordinate = texture_coordinate.xy;
+      mat4 mvpMatrix = perspectiveMatrix * modelMatrix;
+      gl_Position = mvpMatrix * position;
+
+      // TODO: Pass in rotation submatrix with no scaling or transforms to prevent
+      // breaking vNormal in case of model matrix having non-uniform scaling
+      vec4 tmpNormal = mvpMatrix * vec4(normal, 1.0);
+      vec4 transformedZero = mvpMatrix * vec4(0.0, 0.0, 0.0, 1.0);
+      tmpNormal = tmpNormal - transformedZero;
+      vNormal = normalize(tmpNormal.xyz);
+    }
+  )";
+
+  const GLchar *frag_src = R"(
+    precision mediump float;
+
+    varying vec2 sampleCoordinate;  // texture coordinate (0..1)
+    varying vec3 vNormal;
+    uniform sampler2D texture;  // texture to shade with
+    const float kPi = 3.14159265359;
+
+    // Define ambient lighting factor that is applied to our texture in order to
+    // generate ambient lighting of the scene on the object.
Range is [0.0-1.0], + // with the factor being proportional to the brightness of the lighting in the + // scene being applied to the object + const float kAmbientLighting = 0.75; + + // Define RGB values for light source + const vec3 kLightColor = vec3(0.25); + // Exponent for directional lighting that governs diffusion of surface light + const float kExponent = 1.0; + // Define direction of lighting effect source + const vec3 lightDir = vec3(0.0, -1.0, -0.6); + // Hard-coded view direction + const vec3 viewDir = vec3(0.0, 0.0, -1.0); + + // DirectionalLighting procedure imported from Lullaby @ https://github.com/google/lullaby + // Calculate and return the color (diffuse and specular together) reflected by + // a directional light. + vec3 GetDirectionalLight(vec3 pos, vec3 normal, vec3 viewDir, vec3 lightDir, vec3 lightColor, float exponent) { + // Intensity of the diffuse light. Saturate to keep within the 0-1 range. + float normal_dot_light_dir = dot(-normal, -lightDir); + float intensity = clamp(normal_dot_light_dir, 0.0, 1.0); + // Calculate the diffuse light + vec3 diffuse = intensity * lightColor; + // http://www.rorydriscoll.com/2009/01/25/energy-conservation-in-games/ + float kEnergyConservation = (2.0 + exponent) / (2.0 * kPi); + vec3 reflect_dir = reflect(lightDir, -normal); + // Intensity of the specular light + float view_dot_reflect = dot(-viewDir, reflect_dir); + // Use an epsilon for pow because pow(x,y) is undefined if x < 0 or x == 0 + // and y <= 0 (GLSL Spec 8.2) + const float kEpsilon = 1e-5; + intensity = kEnergyConservation * pow(clamp(view_dot_reflect, kEpsilon, 1.0), + exponent); + // Specular color: + vec3 specular = intensity * lightColor; + return diffuse + specular; + } + + void main() { + // Sample the texture, retrieving an rgba pixel value + vec4 pixel = texture2D(texture, sampleCoordinate); + // If the alpha (background) value is near transparent, then discard the + // pixel, this allows the rendering of transparent background GIFs + // TODO: Adding a toggle to perform pixel alpha discarding for transparent + // GIFs (prevent interference with Objectron system). + if (pixel.a < 0.2) discard; + + // Generate directional lighting effect + vec3 lighting = GetDirectionalLight(gl_FragCoord.xyz, vNormal, viewDir, lightDir, kLightColor, kExponent); + // Apply both ambient and directional lighting to our texture + gl_FragColor = vec4((vec3(kAmbientLighting) + lighting) * pixel.rgb, 1.0); + } + )"; + + // Shader program + GLCHECK(GlhCreateProgram(vert_src, frag_src, NUM_ATTRIBUTES, + (const GLchar **)&attr_name[0], attr_location, + &program_)); + RET_CHECK(program_) << "Problem initializing the program."; + texture_uniform_ = GLCHECK(glGetUniformLocation(program_, "texture")); + perspective_matrix_uniform_ = + GLCHECK(glGetUniformLocation(program_, "perspectiveMatrix")); + model_matrix_uniform_ = + GLCHECK(glGetUniformLocation(program_, "modelMatrix")); + return absl::OkStatus(); +} + +absl::Status GlAnimationOverlayCalculator::GlBind( + const TriangleMesh &triangle_mesh, const GlTexture &texture) { + GLCHECK(glUseProgram(program_)); + + // Disable backface culling to allow occlusion effects. 
+ // Some options for solid arbitrary 3D geometry rendering + GLCHECK(glEnable(GL_BLEND)); + GLCHECK(glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)); + GLCHECK(glEnable(GL_DEPTH_TEST)); + GLCHECK(glFrontFace(GL_CW)); + GLCHECK(glDepthMask(GL_TRUE)); + GLCHECK(glDepthFunc(GL_LESS)); + + // Clear our depth buffer before starting draw calls + GLCHECK(glVertexAttribPointer(ATTRIB_VERTEX, 3, GL_FLOAT, 0, 0, + triangle_mesh.vertices.get())); + GLCHECK(glEnableVertexAttribArray(ATTRIB_VERTEX)); + GLCHECK(glVertexAttribPointer(ATTRIB_TEXTURE_POSITION, 2, GL_FLOAT, 0, 0, + triangle_mesh.texture_coords.get())); + GLCHECK(glEnableVertexAttribArray(ATTRIB_TEXTURE_POSITION)); + GLCHECK(glVertexAttribPointer(ATTRIB_NORMAL, 3, GL_FLOAT, 0, 0, + triangle_mesh.normals.get())); + GLCHECK(glEnableVertexAttribArray(ATTRIB_NORMAL)); + GLCHECK(glActiveTexture(GL_TEXTURE1)); + GLCHECK(glBindTexture(texture.target(), texture.name())); + + // We previously bound it to GL_TEXTURE1 + GLCHECK(glUniform1i(texture_uniform_, 1)); + + GLCHECK(glUniformMatrix4fv(perspective_matrix_uniform_, 1, GL_FALSE, + perspective_matrix_)); + return absl::OkStatus(); +} + +absl::Status GlAnimationOverlayCalculator::GlRender( + const TriangleMesh &triangle_mesh, const float *model_matrix) { + GLCHECK(glUniformMatrix4fv(model_matrix_uniform_, 1, GL_FALSE, model_matrix)); + GLCHECK(glDrawElements(GL_TRIANGLES, triangle_mesh.index_count, + GL_UNSIGNED_SHORT, + triangle_mesh.triangle_indices.get())); + return absl::OkStatus(); +} + +GlAnimationOverlayCalculator::~GlAnimationOverlayCalculator() { + helper_.RunInGlContext([this] { + if (program_) { + GLCHECK(glDeleteProgram(program_)); + program_ = 0; + } + if (depth_buffer_created_) { + GLCHECK(glDeleteRenderbuffers(1, &renderbuffer_)); + renderbuffer_ = 0; + } + if (texture_.width() > 0) { + texture_.Release(); + } + if (mask_texture_.width() > 0) { + mask_texture_.Release(); + } + }); +} + +} // namespace mediapipe diff --git a/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.proto b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.proto new file mode 100644 index 0000000..4966f0a --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.proto @@ -0,0 +1,41 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +import "mediapipe/framework/calculator.proto"; + +message GlAnimationOverlayCalculatorOptions { + extend CalculatorOptions { + optional GlAnimationOverlayCalculatorOptions ext = 174760573; + } + + // Default aspect ratio of rendering target width over height. + // This specific value is for 3:4 view. Do not change this default value. + optional float aspect_ratio = 1 [default = 0.75]; + // Default vertical field of view in degrees. This specific default value + // is arbitrary. Do not change this default value. 
If you want to use + // a different vertical_fov_degrees, set it in the options. + optional float vertical_fov_degrees = 2 [default = 70.0]; + + // Perspective projection matrix z-clipping near plane value. + optional float z_clipping_plane_near = 3 [default = 0.1]; + // Perspective projection matrix z-clipping far plane value. + optional float z_clipping_plane_far = 4 [default = 1000.0]; + + // Speed at which to play the animation (in frames per second). + optional float animation_speed_fps = 5 [default = 25.0]; +} diff --git a/mediapipe/graphs/object_detection_3d/calculators/model_matrix.proto b/mediapipe/graphs/object_detection_3d/calculators/model_matrix.proto new file mode 100644 index 0000000..406cc9f --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/calculators/model_matrix.proto @@ -0,0 +1,48 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +message TimedModelMatrixProto { + // 4x4 model matrix stored in ROW major order. + repeated float matrix_entries = 1 [packed = true]; + // Timestamp of this model matrix in milliseconds. + optional int64 time_msec = 2 [default = 0]; + // Unique per object id + optional int32 id = 3 [default = -1]; +} + +message TimedModelMatrixProtoList { + repeated TimedModelMatrixProto model_matrix = 1; +} + +// For convenience, when the desired information or transformation can be +// encoded into vectors (e.g. when the matrix represents a scale or Euler-angle- +// based rotation operation.) +message TimedVectorProto { + // The vector values themselves. + repeated float vector_entries = 1 [packed = true]; + + // Timestamp of this vector in milliseconds. + optional int64 time_msec = 2 [default = 0]; + + // Unique per object id + optional int32 id = 3 [default = -1]; +} + +message TimedVectorProtoList { + repeated TimedVectorProto vector_list = 1; +} diff --git a/mediapipe/graphs/object_detection_3d/obj_parser/BUILD b/mediapipe/graphs/object_detection_3d/obj_parser/BUILD new file mode 100644 index 0000000..3b84cc8 --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/obj_parser/BUILD @@ -0,0 +1,33 @@ +# Copyright 2021 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
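An aside on the two small conversions the GlAnimationOverlayCalculator above performs, since the Rust side of this repo ends up reasoning about the same data: GetAnimationFrameIndex wraps the elapsed time modulo the animation's frame count, and LoadModelMatrices transposes TimedModelMatrixProto's row-major matrix_entries into the column-major layout OpenGL uniforms expect. A minimal Rust sketch of both (our own illustration with hypothetical names, not code from the calculator):

// Mirrors GetAnimationFrameIndex: pick the frame for an elapsed time,
// looping over the animation (assumes a non-negative elapsed time).
fn animation_frame_index(elapsed_secs: f64, speed_fps: f64, frame_count: usize) -> usize {
    (elapsed_secs * speed_fps) as usize % frame_count
}

// Mirrors the transpose in LoadModelMatrices: TimedModelMatrixProto stores a
// 4x4 matrix row-major; OpenGL uniforms expect column-major.
fn row_major_to_column_major(entries: &[f32; 16]) -> [f32; 16] {
    let mut out = [0.0f32; 16];
    for j in 0..16 {
        let (row, col) = (j / 4, j % 4);
        out[row + col * 4] = entries[j];
    }
    out
}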
+ +licenses(["notice"]) + +java_library( + name = "obj_parser_lib", + srcs = [ + "ObjParserMain.java", + "SimpleObjParser.java", + ], + javacopts = ["-Xep:DefaultPackage:OFF"], +) + +java_binary( + name = "ObjParser", + javacopts = ["-Xep:DefaultPackage:OFF"], + main_class = "ObjParserMain", + runtime_deps = [ + ":obj_parser_lib", + ], +) diff --git a/mediapipe/graphs/object_detection_3d/obj_parser/ObjParserMain.java b/mediapipe/graphs/object_detection_3d/obj_parser/ObjParserMain.java new file mode 100644 index 0000000..80e639d --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/obj_parser/ObjParserMain.java @@ -0,0 +1,205 @@ +// Copyright 2021 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import static java.nio.charset.StandardCharsets.UTF_8; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileFilter; +import java.io.FileOutputStream; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.ArrayList; +import java.util.Arrays; + +/** + * Class for running desktop-side parsing/packing routines on .obj AR assets. Usage: ObjParser + * --input_dir=[INPUT_DIRECTORY] --output_dir=[OUTPUT_DIRECTORY] where INPUT_DIRECTORY is the folder + * with asset obj files to process, and OUTPUT_DIRECTORY is the folder where processed asset uuu + * file should be placed. + * + *
+ * <p>
NOTE: Directories are assumed to be absolute paths. + */ +public final class ObjParserMain { + // Simple FileFilter implementation to let us walk over only our .obj files in a particular + // directory. + private static final class ObjFileFilter implements FileFilter { + ObjFileFilter() { + // Nothing to do here. + } + + @Override + public boolean accept(File file) { + return file.getName().endsWith(".obj"); + } + } + + // File extension for binary output files; tagged onto end of initial file extension. + private static final String BINARY_FILE_EXT = ".uuu"; + private static final String INPUT_DIR_FLAG = "--input_dir="; + private static final String OUTPUT_DIR_FLAG = "--output_dir="; + private static final float DEFAULT_VERTEX_SCALE_FACTOR = 30.0f; + private static final double NS_TO_SECONDS = 1e9; + + public final PrintWriter writer; + + public ObjParserMain() { + super(); + this.writer = new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.out, UTF_8))); + } + + // Simple overridable logging function. + protected void logString(String infoLog) { + writer.println(infoLog); + } + + /* + * Main program logic: parse command-line arguments and perform actions. + */ + public void run(String inDirectory, String outDirectory) { + if (inDirectory.isEmpty()) { + logString("Error: Must provide input directory with " + INPUT_DIR_FLAG); + return; + } + if (outDirectory.isEmpty()) { + logString("Error: Must provide output directory with " + OUTPUT_DIR_FLAG); + return; + } + + File dirAsFile = new File(inDirectory); + ObjFileFilter objFileFilter = new ObjFileFilter(); + File[] objFiles = dirAsFile.listFiles(objFileFilter); + + FileOutputStream outputStream = null; + logString("Parsing directory: " + inDirectory); + // We need frames processed in correct order. + Arrays.sort(objFiles); + + for (File objFile : objFiles) { + String fileName = objFile.getAbsolutePath(); + + // Just take the file name of the first processed frame. + if (outputStream == null) { + String outputFileName = outDirectory + objFile.getName() + BINARY_FILE_EXT; + try { + // Create new file here, if we can. + outputStream = new FileOutputStream(outputFileName); + logString("Created outfile: " + outputFileName); + } catch (Exception e) { + logString("Error creating outfile: " + e.toString()); + e.printStackTrace(writer); + return; + } + } + + // Process each file into the stream. + logString("Processing file: " + fileName); + processFile(fileName, outputStream); + } + + // Finally close the stream out. + try { + if (outputStream != null) { + outputStream.close(); + } + } catch (Exception e) { + logString("Error trying to close output stream: " + e.toString()); + e.printStackTrace(writer); + } + } + + /* + * Entrypoint for command-line executable. 
+   */
+  public static void main(String[] args) {
+    // Parse flags
+    String inDirectory = "";
+    String outDirectory = "";
+    for (int i = 0; i < args.length; i++) {
+      if (args[i].startsWith(INPUT_DIR_FLAG)) {
+        inDirectory = args[i].substring(INPUT_DIR_FLAG.length());
+        // Make sure this will be treated as a directory
+        if (!inDirectory.endsWith("/")) {
+          inDirectory += "/";
+        }
+      }
+      if (args[i].startsWith(OUTPUT_DIR_FLAG)) {
+        outDirectory = args[i].substring(OUTPUT_DIR_FLAG.length());
+        // Make sure this will be treated as a directory
+        if (!outDirectory.endsWith("/")) {
+          outDirectory += "/";
+        }
+      }
+    }
+    ObjParserMain parser = new ObjParserMain();
+    parser.run(inDirectory, outDirectory);
+    parser.writer.flush();
+  }
+
+  /*
+   * Internal helper function to parse a .obj from an infile name and stream the resulting data
+   * directly out in binary-dump format to outputStream.
+   */
+  private void processFile(String infileName, OutputStream outputStream) {
+    long start = System.nanoTime();
+
+    // First we parse the obj.
+    SimpleObjParser objParser = new SimpleObjParser(infileName, DEFAULT_VERTEX_SCALE_FACTOR);
+    if (!objParser.parse()) {
+      logString("Error parsing .obj file before processing");
+      return;
+    }
+
+    final float[] vertices = objParser.getVertices();
+    final float[] textureCoords = objParser.getTextureCoords();
+    final ArrayList<Short> triangleList = objParser.getTriangles();
+
+    // Overall byte count to stream: 12 for the 3 list-length ints, and then 4 for each vertex and
+    // texCoord float, and finally 2 for each triangle index short.
+    final int bbSize =
+        12 + 4 * vertices.length + 4 * textureCoords.length + 2 * triangleList.size();
+
+    // Ensure ByteBuffer is native order, just like we want to read it in, but is NOT direct, so
+    // we can call .array() on it.
+    ByteBuffer bb = ByteBuffer.allocate(bbSize);
+    bb.order(ByteOrder.nativeOrder());
+
+    bb.putInt(vertices.length);
+    bb.putInt(textureCoords.length);
+    bb.putInt(triangleList.size());
+    logString(String.format("Writing... Vertices: %d, TextureCoords: %d, Indices: %d.%n",
+        vertices.length, textureCoords.length, triangleList.size()));
+    for (float vertex : vertices) {
+      bb.putFloat(vertex);
+    }
+    for (float textureCoord : textureCoords) {
+      bb.putFloat(textureCoord);
+    }
+    for (Short vertexIndex : triangleList) {
+      bb.putShort(vertexIndex.shortValue());
+    }
+    bb.position(0);
+    try {
+      outputStream.write(bb.array(), 0, bbSize);
+      logString(String.format("Processing successful! Took %.4f seconds.%n",
+          (System.nanoTime() - start) / NS_TO_SECONDS));
+    } catch (Exception e) {
+      logString("Error writing during processing: " + e.toString());
+      e.printStackTrace(writer);
+    }
+  }
+}
diff --git a/mediapipe/graphs/object_detection_3d/obj_parser/SimpleObjParser.java b/mediapipe/graphs/object_detection_3d/obj_parser/SimpleObjParser.java new file mode 100644 index 0000000..937fdff --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/obj_parser/SimpleObjParser.java @@ -0,0 +1,386 @@
+// Copyright 2021 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +import static java.nio.charset.StandardCharsets.UTF_8; + +import java.io.BufferedReader; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + +/** + * Class for parsing a single .obj file into openGL-usable pieces. + * + *
+ * <p>
Usage: + * + *
+ * <p>
SimpleObjParser objParser = new SimpleObjParser("animations/cow/cow320.obj", .015f); + * + *
+ * <p>
if (objParser.parse()) { ... }
+ */
+public class SimpleObjParser {
+  private static class ShortPair {
+    private final Short first;
+    private final Short second;
+
+    public ShortPair(Short newFirst, Short newSecond) {
+      first = newFirst;
+      second = newSecond;
+    }
+
+    public Short getFirst() {
+      return first;
+    }
+
+    public Short getSecond() {
+      return second;
+    }
+  }
+
+  private static final String TAG = SimpleObjParser.class.getSimpleName();
+  private static final boolean DEBUG = false;
+  private static final int INVALID_INDEX = -1;
+  private static final int POSITIONS_COORDS_PER_VERTEX = 3;
+  private static final int TEXTURE_COORDS_PER_VERTEX = 2;
+  private final String fileName;
+
+  // Since .obj doesn't tie together texture coordinates and vertex
+  // coordinates, but OpenGL does, we need to keep a map of all such pairings that occur in
+  // our face list.
+  private final HashMap<ShortPair, Short> vertexTexCoordMap;
+
+  // Internal (de-coupled) unique vertices and texture coordinates
+  private ArrayList<Float> vertices;
+  private ArrayList<Float> textureCoords;
+
+  // Data we expose to openGL for rendering
+  private float[] finalizedVertices;
+  private float[] finalizedTextureCoords;
+  private ArrayList<Short> finalizedTriangles;
+
+  // So we only display warnings about dropped w-coordinates once
+  private boolean vertexCoordIgnoredWarning;
+  private boolean textureCoordIgnoredWarning;
+  private boolean startedProcessingFaces;
+
+  private int numPrimitiveVertices;
+  private int numPrimitiveTextureCoords;
+  private int numPrimitiveFaces;
+
+  // For scratchwork, so we don't have to keep reallocating
+  private float[] tempCoords;
+
+  // We scale all our position coordinates uniformly by this factor
+  private float objectUniformScaleFactor;
+
+  public SimpleObjParser(String objFile, float scaleFactor) {
+    objectUniformScaleFactor = scaleFactor;
+
+    fileName = objFile;
+    vertices = new ArrayList<Float>();
+    textureCoords = new ArrayList<Float>();
+
+    vertexTexCoordMap = new HashMap<ShortPair, Short>();
+    finalizedTriangles = new ArrayList<Short>();
+
+    tempCoords = new float[Math.max(POSITIONS_COORDS_PER_VERTEX, TEXTURE_COORDS_PER_VERTEX)];
+    numPrimitiveFaces = 0;
+
+    vertexCoordIgnoredWarning = false;
+    textureCoordIgnoredWarning = false;
+    startedProcessingFaces = false;
+  }
+
+  // Simple helper wrapper function
+  private void debugLogString(String message) {
+    if (DEBUG) {
+      System.out.println(message);
+    }
+  }
+
+  private void parseVertex(String[] linePieces) {
+    // Note: Traditionally xyzw is acceptable as a format, with w defaulting to 1.0, but for now
+    // we only parse xyz.
+ if (linePieces.length < POSITIONS_COORDS_PER_VERTEX + 1 + || linePieces.length > POSITIONS_COORDS_PER_VERTEX + 2) { + System.out.println("Malformed vertex coordinate specification, assuming xyz format only."); + return; + } else if (linePieces.length == POSITIONS_COORDS_PER_VERTEX + 2 && !vertexCoordIgnoredWarning) { + System.out.println( + "Only x, y, and z parsed for vertex coordinates; w coordinates will be ignored."); + vertexCoordIgnoredWarning = true; + } + + boolean success = true; + try { + for (int i = 1; i < POSITIONS_COORDS_PER_VERTEX + 1; i++) { + tempCoords[i - 1] = Float.parseFloat(linePieces[i]); + } + } catch (NumberFormatException e) { + success = false; + System.out.println("Malformed vertex coordinate error: " + e.toString()); + } + + if (success) { + for (int i = 0; i < POSITIONS_COORDS_PER_VERTEX; i++) { + vertices.add(Float.valueOf(tempCoords[i] * objectUniformScaleFactor)); + } + } + } + + private void parseTextureCoordinate(String[] linePieces) { + // Similar to vertices, uvw is acceptable as a format, with w defaulting to 0.0, but for now we + // only parse uv. + if (linePieces.length < TEXTURE_COORDS_PER_VERTEX + 1 + || linePieces.length > TEXTURE_COORDS_PER_VERTEX + 2) { + System.out.println("Malformed texture coordinate specification, assuming uv format only."); + return; + } else if (linePieces.length == (TEXTURE_COORDS_PER_VERTEX + 2) + && !textureCoordIgnoredWarning) { + debugLogString("Only u and v parsed for texture coordinates; w coordinates will be ignored."); + textureCoordIgnoredWarning = true; + } + + boolean success = true; + try { + for (int i = 1; i < TEXTURE_COORDS_PER_VERTEX + 1; i++) { + tempCoords[i - 1] = Float.parseFloat(linePieces[i]); + } + } catch (NumberFormatException e) { + success = false; + System.out.println("Malformed texture coordinate error: " + e.toString()); + } + + if (success) { + // .obj files treat (0,0) as top-left, compared to bottom-left for openGL. So invert "v" + // texture coordinate only here. + textureCoords.add(Float.valueOf(tempCoords[0])); + textureCoords.add(Float.valueOf(1.0f - tempCoords[1])); + } + } + + // Will return INVALID_INDEX if error occurs, and otherwise will return finalized (combined) + // index, adding and hashing new combinations as it sees them. + private short parseAndProcessCombinedVertexCoord(String coordString) { + String[] coords = coordString.split("/"); + try { + // Parse vertex and texture indices; 1-indexed from front if positive and from end of list if + // negative. + short vertexIndex = Short.parseShort(coords[0]); + short textureIndex = Short.parseShort(coords[1]); + if (vertexIndex > 0) { + vertexIndex--; + } else { + vertexIndex = (short) (vertexIndex + numPrimitiveVertices); + } + if (textureIndex > 0) { + textureIndex--; + } else { + textureIndex = (short) (textureIndex + numPrimitiveTextureCoords); + } + + // Combine indices and look up in pair map. + ShortPair indexPair = new ShortPair(Short.valueOf(vertexIndex), Short.valueOf(textureIndex)); + Short combinedIndex = vertexTexCoordMap.get(indexPair); + if (combinedIndex == null) { + short numIndexPairs = (short) vertexTexCoordMap.size(); + vertexTexCoordMap.put(indexPair, numIndexPairs); + return numIndexPairs; + } else { + return combinedIndex.shortValue(); + } + } catch (NumberFormatException e) { + // Failure to parse coordinates as shorts + return INVALID_INDEX; + } + } + + // Note: it is assumed that face list occurs AFTER vertex and texture coordinate lists finish in + // the obj file format. 
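The parseFace method below fans each polygon out from its first vertex, which is only valid under the assumptions the comment states (coplanar points, convex polygon, consistent winding). The same triangulation in Rust, as a standalone sketch with hypothetical names:

// Fan-triangulate one convex polygon's index list: (i0, i1, i2, i3, ...)
// becomes triangles (i0, i1, i2), (i0, i2, i3), ...
fn fan_triangulate(face: &[u16], out: &mut Vec<u16>) {
    for i in 1..face.len().saturating_sub(1) {
        out.extend_from_slice(&[face[0], face[i], face[i + 1]]);
    }
}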
+  private void parseFace(String[] linePieces) {
+    if (linePieces.length < 4) {
+      System.out.println("Malformed face index list: there must be at least 3 indices per face");
+      return;
+    }
+
+    short[] faceIndices = new short[linePieces.length - 1];
+    boolean success = true;
+    for (int i = 1; i < linePieces.length; i++) {
+      short faceIndex = parseAndProcessCombinedVertexCoord(linePieces[i]);
+
+      if (faceIndex < 0) {
+        System.out.println(faceIndex);
+        System.out.println("Malformed face index: " + linePieces[i]);
+        success = false;
+        break;
+      }
+      faceIndices[i - 1] = faceIndex;
+    }
+
+    if (success) {
+      numPrimitiveFaces++;
+      // Manually triangulate the face under the assumption that the points are coplanar, the poly
+      // is convex, and the points are listed in either clockwise or anti-clockwise orientation.
+      for (int i = 1; i < faceIndices.length - 1; i++) {
+        // We use a triangle fan here, so first point is part of all triangles
+        finalizedTriangles.add(faceIndices[0]);
+        finalizedTriangles.add(faceIndices[i]);
+        finalizedTriangles.add(faceIndices[i + 1]);
+      }
+    }
+  }
+
+  // Iterate over map and reconstruct proper vertex/texture coordinate pairings.
+  private boolean constructFinalCoordinatesFromMap() {
+    final int numIndexPairs = vertexTexCoordMap.size();
+    // XYZ vertices and UV texture coordinates
+    finalizedVertices = new float[POSITIONS_COORDS_PER_VERTEX * numIndexPairs];
+    finalizedTextureCoords = new float[TEXTURE_COORDS_PER_VERTEX * numIndexPairs];
+    try {
+      for (Map.Entry<ShortPair, Short> entry : vertexTexCoordMap.entrySet()) {
+        ShortPair indexPair = entry.getKey();
+        short rawVertexIndex = indexPair.getFirst().shortValue();
+        short rawTexCoordIndex = indexPair.getSecond().shortValue();
+        short finalIndex = entry.getValue().shortValue();
+        for (int i = 0; i < POSITIONS_COORDS_PER_VERTEX; i++) {
+          finalizedVertices[POSITIONS_COORDS_PER_VERTEX * finalIndex + i]
+              = vertices.get(rawVertexIndex * POSITIONS_COORDS_PER_VERTEX + i);
+        }
+        for (int i = 0; i < TEXTURE_COORDS_PER_VERTEX; i++) {
+          finalizedTextureCoords[TEXTURE_COORDS_PER_VERTEX * finalIndex + i]
+              = textureCoords.get(rawTexCoordIndex * TEXTURE_COORDS_PER_VERTEX + i);
+        }
+      }
+    } catch (NumberFormatException e) {
+      System.out.println("Malformed index in vertex/texture coordinate mapping.");
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * Returns the vertex position coordinate list (x1, y1, z1, x2, y2, z2, ...) after a successful
+   * call to parse().
+   */
+  public float[] getVertices() {
+    return finalizedVertices;
+  }
+
+  /**
+   * Returns the vertex texture coordinate list (u1, v1, u2, v2, ...) after a successful call to
+   * parse().
+   */
+  public float[] getTextureCoords() {
+    return finalizedTextureCoords;
+  }
+
+  /**
+   * Returns the list of indices (a1, b1, c1, a2, b2, c2, ...) after a successful call to parse().
+   * Each (a, b, c) triplet specifies a triangle to be rendered, with a, b, and c Short objects used
+   * to index into the coordinates returned by getVertices() and getTextureCoords().
+ * <p>
* For example, a Short index representing 5 should be used to index into vertices[15],
+   * vertices[16], and vertices[17], as well as textureCoords[10] and textureCoords[11].
+   */
+  public ArrayList<Short> getTriangles() {
+    return finalizedTriangles;
+  }
+
+  /**
+   * Attempts to locate and read the specified .obj file, and parse it accordingly. None of the
+   * getter functions in this class will return valid results until a value of true is returned
+   * from this function.
+   * @return true on success.
+   */
+  public boolean parse() {
+    boolean success = true;
+    BufferedReader reader = null;
+    try {
+      reader = Files.newBufferedReader(Paths.get(fileName), UTF_8);
+      String line;
+      while ((line = reader.readLine()) != null) {
+        // Skip over lines with no characters
+        if (line.length() < 1) {
+          continue;
+        }
+
+        // Ignore comment lines entirely
+        if (line.charAt(0) == '#') {
+          continue;
+        }
+
+        // Split into pieces based on whitespace, and process according to first command piece
+        String[] linePieces = line.split(" +");
+        switch (linePieces[0]) {
+          case "v":
+            // Add vertex
+            if (startedProcessingFaces) {
+              throw new IOException("Vertices must all be declared before faces in obj files.");
+            }
+            parseVertex(linePieces);
+            break;
+          case "vt":
+            // Add texture coordinate
+            if (startedProcessingFaces) {
+              throw new IOException(
+                  "Texture coordinates must all be declared before faces in obj files.");
+            }
+            parseTextureCoordinate(linePieces);
+            break;
+          case "f":
+            // Vertex and texture coordinate lists should be locked into place by now
+            if (!startedProcessingFaces) {
+              startedProcessingFaces = true;
+              numPrimitiveVertices = vertices.size() / POSITIONS_COORDS_PER_VERTEX;
+              numPrimitiveTextureCoords = textureCoords.size() / TEXTURE_COORDS_PER_VERTEX;
+            }
+            // Add face
+            parseFace(linePieces);
+            break;
+          default:
+            // Unknown or unused directive: ignoring
+            // Note: We do not yet process vertex normals or curves, so we ignore {vp, vn, s}
+            // Note: We assume only a single object, so we ignore {g, o}
+            // Note: We also assume a single texture, which we process independently, so we ignore
+            // {mtllib, usemtl}
+            break;
+        }
+      }
+
+      // If we made it all the way through, then we have a vertex-to-tex-coord pair mapping, so
+      // construct our final vertex and texture coordinate lists now.
+      success = constructFinalCoordinatesFromMap();
+
+    } catch (IOException e) {
+      success = false;
+      System.out.println("Failure to parse obj file: " + e.toString());
+    } finally {
+      try {
+        if (reader != null) {
+          reader.close();
+        }
+      } catch (IOException e) {
+        System.out.println("Couldn't close reader");
+      }
+    }
+    if (success) {
+      debugLogString("Successfully parsed " + numPrimitiveVertices + " vertices and "
+          + numPrimitiveTextureCoords + " texture coordinates into " + vertexTexCoordMap.size()
+          + " combined vertices and " + numPrimitiveFaces + " faces, represented as a mesh of "
+          + finalizedTriangles.size() / 3 + " triangles.");
+    }
+    return success;
+  }
+}
diff --git a/mediapipe/graphs/object_detection_3d/obj_parser/obj_cleanup.sh b/mediapipe/graphs/object_detection_3d/obj_parser/obj_cleanup.sh new file mode 100755 index 0000000..1573387 --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/obj_parser/obj_cleanup.sh @@ -0,0 +1,44 @@
+#!/bin/bash
+
+# Copyright 2021 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# The SimpleObjParser expects the obj commands to follow v/vt/f order. This +# little script will read all the obj files in a directory and sort the +# existing obj commands inside them to also follow this order (so all v lines +# will appear before all vt lines, which will appear before all f lines). + +# Usage: ./obj_cleanup.sh input_folder output_folder +# input_folder and output_folder paths can be absolute or relative. + +input_folder=$1 +output_folder=$2 +if [[ "${input_folder}" == "" ]]; then + echo "input_folder must be defined. Usage: ./obj_cleanup.sh input_folder output_folder" + exit 1 +fi +if [[ "${output_folder}" == "" ]]; then + echo "output_folder must be defined. Usage: ./obj_cleanup.sh input_folder output_folder" + exit 1 +fi + +# Find all the obj files and remove the directory name +# Interestingly, piping | sed 's!.obj!! also removed the extension obj too. +find "${input_folder}" -name "*.obj" | sed 's!.*/!!' | sort | +while IFS= read -r filename; do + echo "Clean up ${filename}" + cat "${input_folder}/${filename}" | grep 'v ' > "${output_folder}/${filename}" + cat "${input_folder}/${filename}" | grep 'vt ' >> "${output_folder}/${filename}" + cat "${input_folder}/${filename}" | grep 'f ' >> "${output_folder}/${filename}" +done diff --git a/mediapipe/graphs/object_detection_3d/object_occlusion_tracking.pbtxt b/mediapipe/graphs/object_detection_3d/object_occlusion_tracking.pbtxt new file mode 100644 index 0000000..10b11de --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/object_occlusion_tracking.pbtxt @@ -0,0 +1,122 @@ +# MediaPipe graph that performs box tracking with TensorFlow Lite on GPU. + +# Images coming into and out of the graph. +input_stream: "input_video" +input_stream: "WIDTH:input_width" +input_stream: "HEIGHT:input_height" +input_side_packet: "LABELS_CSV:allowed_labels" +input_side_packet: "MODEL_SCALE:model_scale" +input_side_packet: "MODEL_TRANSFORMATION:model_transformation" +input_side_packet: "TEXTURE:box_texture" +input_side_packet: "MAX_NUM_OBJECTS:max_num_objects" +input_side_packet: "ANIMATION_ASSET:box_asset_name" +input_side_packet: "MASK_TEXTURE:obj_texture" +input_side_packet: "MASK_ASSET:obj_asset_name" +output_stream: "output_video" + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for downstream nodes +# (calculators and subgraphs) in the graph to finish their tasks before it +# passes through another image. All images that come in while waiting are +# dropped, limiting the number of in-flight images in most part of the graph to +# 1. This prevents the downstream nodes from queuing up incoming images and data +# excessively, which leads to increased latency and memory usage, unwanted in +# real-time mobile applications. It also eliminates unnecessarily computation, +# e.g., the output produced by a node may get dropped downstream if the +# subsequent nodes are still busy processing previous inputs. 
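Before the FlowLimiterCalculator node below, it may help to see the back-pressure rule from the comment above in isolation: admit one frame, drop everything else until the FINISHED back edge fires. A toy Rust sketch of just that rule (our own illustration, not MediaPipe's implementation, which also deals with timestamps and queues):

use std::sync::atomic::{AtomicBool, Ordering};

// At most one frame may be "in the graph"; frames arriving meanwhile are dropped.
struct FlowLimiter {
    in_flight: AtomicBool,
}

impl FlowLimiter {
    fn new() -> Self {
        Self { in_flight: AtomicBool::new(false) }
    }

    // Returns true if this frame may enter the graph.
    fn try_admit(&self) -> bool {
        self.in_flight
            .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
            .is_ok()
    }

    // Called when the FINISHED back edge fires for the admitted frame.
    fn finished(&self) {
        self.in_flight.store(false, Ordering::Release);
    }
}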
+node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Crops the image from the center to the size WIDTHxHEIGHT. +node: { + calculator: "ImageCroppingCalculator" + input_stream: "IMAGE_GPU:throttled_input_video" + output_stream: "IMAGE_GPU:throttled_input_video_3x4" + input_stream: "WIDTH:input_width" + input_stream: "HEIGHT:input_height" + node_options: { + [type.googleapis.com/mediapipe.ImageCroppingCalculatorOptions] { + border_mode: BORDER_REPLICATE + } + } +} + +node { + calculator: "ObjectronGpuSubgraph" + input_stream: "IMAGE_GPU:throttled_input_video_3x4" + input_side_packet: "LABELS_CSV:allowed_labels" + input_side_packet: "MAX_NUM_OBJECTS:max_num_objects" + output_stream: "FRAME_ANNOTATION:lifted_objects" +} + +# The rendering nodes: +# We are rendering two meshes: 1) a 3D bounding box, which we overlay directly +# on the texture, and 2) a virtual object, which we use as an occlusion mask. +# These models are designed using different tools, so we supply a transformation +# to bring both of them to the Objectron's coordinate system. + +# Creates a model matrices for the tracked object given the lifted 3D points. +# This calculator does two things: 1) Estimates object's pose (orientation, +# translation, and scale) from the 3D vertices, and +# 2) bring the object from the objectron's coordinate system to the renderer +# (OpenGL) coordinate system. Since the final goal is to render a mesh file on +# top of the object, we also supply a transformation to bring the mesh to the +# objectron's coordinate system, and rescale mesh to the unit size. +node { + calculator: "AnnotationsToModelMatricesCalculator" + input_stream: "ANNOTATIONS:lifted_objects" + output_stream: "MODEL_MATRICES:model_matrices" + node_options: { + [type.googleapis.com/mediapipe.AnnotationsToModelMatricesCalculatorOptions] { + # Re-scale the CAD model to the size of a unit box + model_scale: [0.04, 0.04, 0.04] + # Bring the box CAD model to objectron's coordinate system. This + # is equivalent of -pi/2 rotation along the y-axis (right-hand rule): + # Eigen::AngleAxisf(-M_PI / 2., Eigen::Vector3f::UnitY()) + model_transformation: [0.0, 0.0, -1.0, 0.0] + model_transformation: [0.0, 1.0, 0.0, 0.0] + model_transformation: [1.0, 0.0, 0.0, 0.0] + model_transformation: [0.0, 0.0, 0.0, 1.0] + } + } +} + +# Compute the model matrices for the CAD model of the virtual object, to be used +# as an occlusion mask. The model will be rendered at the exact same location as +# the bounding box. +node { + calculator: "AnnotationsToModelMatricesCalculator" + input_stream: "ANNOTATIONS:lifted_objects" + input_side_packet: "MODEL_SCALE:model_scale" + input_side_packet: "MODEL_TRANSFORMATION:model_transformation" + output_stream: "MODEL_MATRICES:mask_model_matrices" +} + +# Render everything together. First we render the 3D bounding box animation, +# then we render the occlusion mask. 
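As a sanity check on the AnnotationsToModelMatricesCalculator options above: the four model_transformation rows should equal a -pi/2 rotation about the y-axis, which is easy to verify with the cgmath crate this repo already depends on (hypothetical helper; note cgmath's Matrix4 is column-major while the pbtxt lists rows):

use cgmath::{Matrix4, Rad};

// Builds the same -pi/2 y-axis rotation and prints it row by row so it can
// be compared against the model_transformation entries above.
fn print_box_transform_rows() {
    let m: Matrix4<f32> = Matrix4::from_angle_y(Rad(-std::f32::consts::FRAC_PI_2));
    // cgmath stores columns (m.x is the first column), so row i is
    // (m.x[i], m.y[i], m.z[i], m.w[i]).
    for i in 0..4 {
        println!("[{}, {}, {}, {}]", m.x[i], m.y[i], m.z[i], m.w[i]);
    }
}

Up to f32 rounding this prints [0, 0, -1, 0], [0, 1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1], matching the entries above; the GlAnimationOverlayCalculator node that follows then draws both render passes.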
+node: { + calculator: "GlAnimationOverlayCalculator" + input_stream: "VIDEO:throttled_input_video_3x4" + input_stream: "MODEL_MATRICES:model_matrices" + input_stream: "MASK_MODEL_MATRICES:mask_model_matrices" + output_stream: "output_video" + input_side_packet: "TEXTURE:box_texture" + input_side_packet: "ANIMATION_ASSET:box_asset_name" + input_side_packet: "MASK_TEXTURE:obj_texture" + input_side_packet: "MASK_ASSET:obj_asset_name" + node_options: { + [type.googleapis.com/mediapipe.GlAnimationOverlayCalculatorOptions] { + aspect_ratio: 0.75 + vertical_fov_degrees: 70. + animation_speed_fps: 25 + } + } +} diff --git a/mediapipe/graphs/object_detection_3d/object_occlusion_tracking_1stage.pbtxt b/mediapipe/graphs/object_detection_3d/object_occlusion_tracking_1stage.pbtxt new file mode 100644 index 0000000..bda02b2 --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/object_occlusion_tracking_1stage.pbtxt @@ -0,0 +1,133 @@ +# MediaPipe object detection 3D with tracking graph. + +# Images on GPU coming into and out of the graph. +input_stream: "input_video" +input_stream: "input_width" +input_stream: "input_height" +output_stream: "output_video" + +# Crops the image from the center to the size WIDTHxHEIGHT. +node: { + calculator: "ImageCroppingCalculator" + input_stream: "IMAGE_GPU:input_video" + output_stream: "IMAGE_GPU:input_video_4x3" + input_stream: "WIDTH:input_width" + input_stream: "HEIGHT:input_height" + node_options: { + [type.googleapis.com/mediapipe.ImageCroppingCalculatorOptions] { + border_mode: BORDER_REPLICATE + } + } +} + +# Creates a copy of the input_video stream. At the end of the graph, the +# GlAnimationOverlayCalculator will consume the input_video texture and draws +# on top of it. +node: { + calculator: "GlScalerCalculator" + input_stream: "VIDEO:input_video_4x3" + output_stream: "VIDEO:input_video_copy" +} + +# Resamples the images by specific frame rate. This calculator is used to +# control the frequecy of subsequent calculators/subgraphs, e.g. less power +# consumption for expensive process. +node { + calculator: "PacketResamplerCalculator" + input_stream: "DATA:input_video_copy" + output_stream: "DATA:sampled_input_video" + node_options: { + [type.googleapis.com/mediapipe.PacketResamplerCalculatorOptions] { + frame_rate: 5 + } + } +} + +node { + calculator: "ObjectronDetection1StageSubgraphGpu" + input_stream: "IMAGE_GPU:sampled_input_video" + output_stream: "ANNOTATIONS:objects" +} + +node { + calculator: "ObjectronTracking1StageSubgraphGpu" + input_stream: "FRAME_ANNOTATION:objects" + input_stream: "IMAGE_GPU:input_video_copy" + output_stream: "LIFTED_FRAME_ANNOTATION:lifted_tracked_objects" +} + +# The rendering nodes: +# We are rendering two meshes: 1) a 3D bounding box, which we overlay directly +# on the texture, and 2) a shoe CAD model, which we use as an occlusion mask. +# These models are designed using different tools, so we supply a transformation +# to bring both of them to the Objectron's coordinate system. + +# Creates a model matrices for the tracked object given the lifted 3D points. +# This calculator does two things: 1) Estimates object's pose (orientation, +# translation, and scale) from the 3D vertices, and +# 2) bring the object from the objectron's coordinate system to the renderer +# (OpenGL) coordinate system. Since the final goal is to render a mesh file on +# top of the object, we also supply a transformation to bring the mesh to the +# objectron's coordinate system, and rescale mesh to the unit size. 
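One more note before the model-matrix nodes below: the PacketResamplerCalculator earlier in this graph thins the stream to frame_rate: 5 so the expensive detection branch runs on a subset of frames. The gating idea, sketched in Rust under the assumption of microsecond timestamps (the real calculator additionally handles jitter and stream headers):

// Emits at most `frame_rate` packets per second; everything else is dropped.
struct PacketResampler {
    period_us: i64,
    next_emit_us: i64,
}

impl PacketResampler {
    fn new(frame_rate: f64) -> Self {
        Self { period_us: (1_000_000.0 / frame_rate) as i64, next_emit_us: 0 }
    }

    fn should_emit(&mut self, timestamp_us: i64) -> bool {
        if timestamp_us >= self.next_emit_us {
            self.next_emit_us = timestamp_us + self.period_us;
            return true;
        }
        false
    }
}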
+node { + calculator: "AnnotationsToModelMatricesCalculator" + input_stream: "ANNOTATIONS:lifted_tracked_objects" + output_stream: "MODEL_MATRICES:model_matrices" + node_options: { + [type.googleapis.com/mediapipe.AnnotationsToModelMatricesCalculatorOptions] { + # Re-scale the CAD model to the size of a unit box + model_scale: [0.05, 0.05, 0.05] + # Bring the box CAD model to objectron's coordinate system. This + # is equivalent of -pi/2 rotation along the y-axis (right-hand rule): + # Eigen::AngleAxisf(-M_PI / 2., Eigen::Vector3f::UnitY()) + model_transformation: [0.0, 0.0, -1.0, 0.0] + model_transformation: [0.0, 1.0, 0.0, 0.0] + model_transformation: [1.0, 0.0, 0.0, 0.0] + model_transformation: [0.0, 0.0, 0.0, 1.0] + } + } +} + +# Compute the model matrices for the CAD model of the chair, to be used as an +# occlusion mask. The model will be rendered at the exact same location as the +# bounding box. +node { + calculator: "AnnotationsToModelMatricesCalculator" + input_stream: "ANNOTATIONS:lifted_tracked_objects" + output_stream: "MODEL_MATRICES:mask_model_matrices" + node_options: { + [type.googleapis.com/mediapipe.AnnotationsToModelMatricesCalculatorOptions] { + # Re-scale the CAD model to the size of a unit box + model_scale: [0.15, 0.1, 0.15] + # Bring the CAD model to Deep Pursuit 3D's coordinate system. This + # is equivalent of -pi/2 rotation along the x-axis: + # Eigen::AngleAxisf(-M_PI / 2., Eigen::Vector3f::UnitX()) + model_transformation: [1.0, 0.0, 0.0, 0.0] + model_transformation: [0.0, 1.0, 0.0, -10.0] + model_transformation: [0.0, 0.0, -1.0, 0.0] + model_transformation: [0.0, 0.0, 0.0, 1.0] + } + } +} + +# Render everything together. First we render the 3D bounding box animation, +# then we render the occlusion mask. +node:{ + calculator:"GlAnimationOverlayCalculator" + input_stream:"VIDEO:input_video_4x3" + input_stream:"MODEL_MATRICES:model_matrices" + input_stream:"MASK_MODEL_MATRICES:mask_model_matrices" + output_stream:"output_video" + input_side_packet:"TEXTURE:box_texture" + input_side_packet:"ANIMATION_ASSET:box_asset_name" + input_side_packet:"MASK_TEXTURE:obj_texture" + input_side_packet:"MASK_ASSET:obj_asset_name" + node_options: { + [type.googleapis.com/mediapipe.GlAnimationOverlayCalculatorOptions] { + # Output resolution is 480x640 with the aspect ratio of 0.75 + aspect_ratio: 0.75 + vertical_fov_degrees: 70. + animation_speed_fps: 25 + } + } +} diff --git a/mediapipe/graphs/object_detection_3d/objectron_desktop_cpu.pbtxt b/mediapipe/graphs/object_detection_3d/objectron_desktop_cpu.pbtxt new file mode 100644 index 0000000..0a962d7 --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/objectron_desktop_cpu.pbtxt @@ -0,0 +1,60 @@ +# MediaPipe Objectron 3D object detection on Desktop CPU. +input_side_packet: "INPUT_FILE_PATH:input_video_path" +input_side_packet: "FILE_PATH:0:box_landmark_model_path" +input_side_packet: "LABELS_CSV:allowed_labels" +input_side_packet: "OUTPUT_FILE_PATH:output_video_path" + +# Generates side packet with max number of objects to detect/track. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:max_num_objects" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { int_value: 5 } + } + } +} + +# Decodes an input video file into images and a video header. 
+node { + calculator: "OpenCvVideoDecoderCalculator" + input_side_packet: "INPUT_FILE_PATH:input_video_path" + output_stream: "VIDEO:input_video" + output_stream: "VIDEO_PRESTREAM:input_video_header" +} + +# Run Objectron subgraph. +node { + calculator: "ObjectronCpuSubgraph" + input_stream: "IMAGE:input_video" + input_side_packet: "MODEL_PATH:box_landmark_model_path" + input_side_packet: "LABELS_CSV:allowed_labels" + input_side_packet: "MAX_NUM_OBJECTS:max_num_objects" + output_stream: "MULTI_LANDMARKS:box_landmarks" + output_stream: "NORM_RECTS:box_rect" +} + +# Subgraph that renders annotations and overlays them on top of the input +# images (see renderer_cpu.pbtxt). +node { + calculator: "RendererSubgraph" + input_stream: "IMAGE:input_video" + input_stream: "MULTI_LANDMARKS:box_landmarks" + input_stream: "NORM_RECTS:box_rect" + output_stream: "IMAGE:output_video" +} + +# Encodes the annotated images into a video file, adopting properties specified +# in the input video header, e.g., video framerate. +node { + calculator: "OpenCvVideoEncoderCalculator" + input_stream: "VIDEO:output_video" + input_stream: "VIDEO_PRESTREAM:input_video_header" + input_side_packet: "OUTPUT_FILE_PATH:output_video_path" + node_options: { + [type.googleapis.com/mediapipe.OpenCvVideoEncoderCalculatorOptions]: { + codec: "avc1" + video_format: "mp4" + } + } +} diff --git a/mediapipe/graphs/object_detection_3d/subgraphs/BUILD b/mediapipe/graphs/object_detection_3d/subgraphs/BUILD new file mode 100644 index 0000000..524ef9f --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/subgraphs/BUILD @@ -0,0 +1,37 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "renderer_cpu", + graph = "renderer_cpu.pbtxt", + register_as = "RendererSubgraph", + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_data_calculator", + ], +) diff --git a/mediapipe/graphs/object_detection_3d/subgraphs/renderer_cpu.pbtxt b/mediapipe/graphs/object_detection_3d/subgraphs/renderer_cpu.pbtxt new file mode 100644 index 0000000..0f275a7 --- /dev/null +++ b/mediapipe/graphs/object_detection_3d/subgraphs/renderer_cpu.pbtxt @@ -0,0 +1,75 @@ +# MediaPipe Objectron vertices/landmarks rendering CPU subgraph. 
+ +type: "RendererSubgraph" + +input_stream: "IMAGE:input_image" +input_stream: "MULTI_LANDMARKS:multi_landmarks" +input_stream: "NORM_RECTS:multi_rect" +output_stream: "IMAGE:output_image" + +# Outputs each element of multi_landmarks at a fake timestamp for the rest +# of the graph to process. At the end of the loop, outputs the BATCH_END +# timestamp for downstream calculators to inform them that all elements in the +# vector have been processed. +node { + calculator: "BeginLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITERABLE:multi_landmarks" + output_stream: "ITEM:single_landmarks" + output_stream: "BATCH_END:landmark_timestamp" +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:single_landmarks" + output_stream: "RENDER_DATA:single_landmark_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: [1, 2] # edge 1-2 + landmark_connections: [1, 3] # edge 1-3 + landmark_connections: [1, 5] # edge 1-5 + landmark_connections: [2, 4] # edge 2-4 + landmark_connections: [2, 6] # edge 2-6 + landmark_connections: [3, 4] # edge 3-4 + landmark_connections: [3, 7] # edge 3-7 + landmark_connections: [4, 8] # edge 4-8 + landmark_connections: [5, 6] # edge 5-6 + landmark_connections: [5, 7] # edge 5-7 + landmark_connections: [6, 8] # edge 6-8 + landmark_connections: [7, 8] # edge 7-8 + landmark_color { r: 255 g: 0 b: 0 } + connection_color { r: 0 g: 255 b: 0 } + thickness: 4.0 + } + } +} + +node { + calculator: "EndLoopRenderDataCalculator" + input_stream: "ITEM:single_landmark_render_data" + input_stream: "BATCH_END:landmark_timestamp" + output_stream: "ITERABLE:multi_landmarks_render_data" +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECTS:multi_rect" + output_stream: "RENDER_DATA:multi_rect_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_image" + input_stream: "VECTOR:multi_landmarks_render_data" + input_stream: "multi_rect_render_data" + output_stream: "IMAGE:output_image" +} diff --git a/mediapipe/graphs/pose_tracking/BUILD b/mediapipe/graphs/pose_tracking/BUILD new file mode 100644 index 0000000..26f607c --- /dev/null +++ b/mediapipe/graphs/pose_tracking/BUILD @@ -0,0 +1,56 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
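For the desktop CPU graph above, the decoder and encoder nodes bracket the Objectron subgraph exactly the way a plain OpenCV loop would. A rough Rust analogue using this repo's opencv crate (a sketch: the processing step is left as a comment, and the fourcc signature shown matches older crate releases such as the pinned 0.63; newer releases take char arguments):

use opencv::{core::Size, prelude::*, videoio, Result};

// Reads a video, leaves room for per-frame processing, and writes the result
// with the same avc1/mp4 settings as the encoder node above.
fn transcode(input: &str, output: &str) -> Result<()> {
    let mut cap = videoio::VideoCapture::from_file(input, videoio::CAP_ANY)?;
    let fps = cap.get(videoio::CAP_PROP_FPS)?;
    let size = Size::new(
        cap.get(videoio::CAP_PROP_FRAME_WIDTH)? as i32,
        cap.get(videoio::CAP_PROP_FRAME_HEIGHT)? as i32,
    );
    let fourcc = videoio::VideoWriter::fourcc('a' as i8, 'v' as i8, 'c' as i8, '1' as i8)?;
    let mut writer = videoio::VideoWriter::new(output, fourcc, fps, size, true)?;
    let mut frame = Mat::default();
    while cap.read(&mut frame)? {
        if frame.size()?.width == 0 {
            break; // end of stream
        }
        // ... run the Objectron subgraph / renderer on `frame` here ...
        writer.write(&frame)?;
    }
    Ok(())
}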
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "pose_tracking_gpu_deps", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/graphs/pose_tracking/subgraphs:pose_renderer_gpu", + "//mediapipe/modules/pose_landmark:pose_landmark_gpu", + ], +) + +mediapipe_binary_graph( + name = "pose_tracking_gpu_binary_graph", + graph = "pose_tracking_gpu.pbtxt", + output_name = "pose_tracking_gpu.binarypb", + deps = [":pose_tracking_gpu_deps"], +) + +cc_library( + name = "pose_tracking_cpu_deps", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/graphs/pose_tracking/subgraphs:pose_renderer_cpu", + "//mediapipe/modules/pose_landmark:pose_landmark_cpu", + ], +) + +mediapipe_binary_graph( + name = "pose_tracking_cpu_binary_graph", + graph = "pose_tracking_cpu.pbtxt", + output_name = "pose_tracking_cpu.binarypb", + deps = [":pose_tracking_cpu_deps"], +) diff --git a/mediapipe/graphs/pose_tracking/pose_tracking_cpu.pbtxt b/mediapipe/graphs/pose_tracking/pose_tracking_cpu.pbtxt new file mode 100644 index 0000000..31d847e --- /dev/null +++ b/mediapipe/graphs/pose_tracking/pose_tracking_cpu.pbtxt @@ -0,0 +1,63 @@ +# MediaPipe graph that performs pose tracking with TensorFlow Lite on CPU. + +# CPU buffer. (ImageFrame) +input_stream: "input_video" + +# Output image with rendered results. (ImageFrame) +output_stream: "output_video" +# Pose landmarks. (NormalizedLandmarkList) +output_stream: "pose_landmarks" + +# Generates side packet to enable segmentation. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:enable_segmentation" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { bool_value: true } + } + } +} + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for downstream nodes +# (calculators and subgraphs) in the graph to finish their tasks before it +# passes through another image. All images that come in while waiting are +# dropped, limiting the number of in-flight images in most part of the graph to +# 1. This prevents the downstream nodes from queuing up incoming images and data +# excessively, which leads to increased latency and memory usage, unwanted in +# real-time mobile applications. It also eliminates unnecessarily computation, +# e.g., the output produced by a node may get dropped downstream if the +# subsequent nodes are still busy processing previous inputs. +node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Subgraph that detects poses and corresponding landmarks. +node { + calculator: "PoseLandmarkCpu" + input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + input_stream: "IMAGE:throttled_input_video" + output_stream: "LANDMARKS:pose_landmarks" + output_stream: "SEGMENTATION_MASK:segmentation_mask" + output_stream: "DETECTION:pose_detection" + output_stream: "ROI_FROM_LANDMARKS:roi_from_landmarks" +} + +# Subgraph that renders pose-landmark annotation onto the input image. 
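The PoseLandmarkCpu node above also emits SEGMENTATION_MASK as a single-channel float image (ImageFrame in ImageFormat::VEC32F1, values in [0, 1]). Before the renderer node below consumes it, note that on the Rust side such a mask can be scaled to a displayable 8-bit image with the repo's opencv crate (a sketch, assuming the mask arrives as a Mat):

use opencv::{core, prelude::*, Result};

// Scales a 0.0..1.0 single-channel float mask to 0..255 8-bit for highgui.
fn mask_to_gray(mask_f32: &Mat) -> Result<Mat> {
    let mut gray = Mat::default();
    mask_f32.convert_to(&mut gray, core::CV_8U, 255.0, 0.0)?;
    Ok(gray)
}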
+node {
+  calculator: "PoseRendererCpu"
+  input_stream: "IMAGE:throttled_input_video"
+  input_stream: "LANDMARKS:pose_landmarks"
+  input_stream: "SEGMENTATION_MASK:segmentation_mask"
+  input_stream: "DETECTION:pose_detection"
+  input_stream: "ROI:roi_from_landmarks"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt b/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt
new file mode 100644
index 0000000..35be3f0
--- /dev/null
+++ b/mediapipe/graphs/pose_tracking/pose_tracking_gpu.pbtxt
@@ -0,0 +1,63 @@
+# MediaPipe graph that performs pose tracking with TensorFlow Lite on GPU.
+
+# GPU buffer. (GpuBuffer)
+input_stream: "input_video"
+
+# Output image with rendered results. (GpuBuffer)
+output_stream: "output_video"
+# Pose landmarks. (NormalizedLandmarkList)
+output_stream: "pose_landmarks"
+
+# Generates side packet to enable segmentation.
+node {
+  calculator: "ConstantSidePacketCalculator"
+  output_side_packet: "PACKET:enable_segmentation"
+  node_options: {
+    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
+      packet { bool_value: true }
+    }
+  }
+}
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that come in while waiting are
+# dropped, limiting the number of in-flight images in most part of the graph to
+# 1. This prevents the downstream nodes from queuing up incoming images and data
+# excessively, which leads to increased latency and memory usage, unwanted in
+# real-time mobile applications. It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Subgraph that detects poses and corresponding landmarks.
+node {
+  calculator: "PoseLandmarkGpu"
+  input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "LANDMARKS:pose_landmarks"
+  output_stream: "SEGMENTATION_MASK:segmentation_mask"
+  output_stream: "DETECTION:pose_detection"
+  output_stream: "ROI_FROM_LANDMARKS:roi_from_landmarks"
+}
+
+# Subgraph that renders pose-landmark annotation onto the input image.
+node {
+  calculator: "PoseRendererGpu"
+  input_stream: "IMAGE:throttled_input_video"
+  input_stream: "LANDMARKS:pose_landmarks"
+  input_stream: "SEGMENTATION_MASK:segmentation_mask"
+  input_stream: "DETECTION:pose_detection"
+  input_stream: "ROI:roi_from_landmarks"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/mediapipe/graphs/pose_tracking/subgraphs/BUILD b/mediapipe/graphs/pose_tracking/subgraphs/BUILD
new file mode 100644
index 0000000..fa34640
--- /dev/null
+++ b/mediapipe/graphs/pose_tracking/subgraphs/BUILD
@@ -0,0 +1,52 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "pose_renderer_gpu", + graph = "pose_renderer_gpu.pbtxt", + register_as = "PoseRendererGpu", + deps = [ + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/image:recolor_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_scale_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "pose_renderer_cpu", + graph = "pose_renderer_cpu.pbtxt", + register_as = "PoseRendererCpu", + deps = [ + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/image:recolor_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_scale_calculator", + ], +) diff --git a/mediapipe/graphs/pose_tracking/subgraphs/pose_renderer_cpu.pbtxt b/mediapipe/graphs/pose_tracking/subgraphs/pose_renderer_cpu.pbtxt new file mode 100644 index 0000000..e176765 --- /dev/null +++ b/mediapipe/graphs/pose_tracking/subgraphs/pose_renderer_cpu.pbtxt @@ -0,0 +1,292 @@ +# MediaPipe pose landmarks rendering subgraph. + +type: "PoseRendererCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:input_image" +# Pose landmarks. (NormalizedLandmarkList) +input_stream: "LANDMARKS:pose_landmarks" +# Segmentation mask. (ImageFrame in ImageFormat::VEC32F1) +input_stream: "SEGMENTATION_MASK:segmentation_mask" +# Region of interest calculated based on landmarks. (NormalizedRect) +input_stream: "ROI:roi" +# Detected pose. (Detection) +input_stream: "DETECTION:detection" + +# CPU image with rendered data. (ImageFrame) +output_stream: "IMAGE:output_image" + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:input_image" + output_stream: "SIZE:image_size" +} + +# Calculates rendering scale based on the pose roi. +node { + calculator: "RectToRenderScaleCalculator" + input_stream: "NORM_RECT:roi" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "RENDER_SCALE:render_scale" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderScaleCalculatorOptions] { + multiplier: 0.0012 + } + } +} + +# Converts detections to drawing primitives for annotation overlay. 
+node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTION:detection" + output_stream: "RENDER_DATA:detection_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "visible_pose_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 25 } + } + } +} + +# Converts landmarks to drawing primitives for annotation overlay. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:pose_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 7 + landmark_connections: 0 + landmark_connections: 4 + landmark_connections: 4 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 8 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 11 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 15 + landmark_connections: 15 + landmark_connections: 17 + landmark_connections: 15 + landmark_connections: 19 + landmark_connections: 15 + landmark_connections: 21 + landmark_connections: 17 + landmark_connections: 19 + landmark_connections: 12 + landmark_connections: 14 + landmark_connections: 14 + landmark_connections: 16 + landmark_connections: 16 + landmark_connections: 18 + landmark_connections: 16 + landmark_connections: 20 + landmark_connections: 16 + landmark_connections: 22 + landmark_connections: 18 + landmark_connections: 20 + landmark_connections: 11 + landmark_connections: 23 + landmark_connections: 12 + landmark_connections: 24 + landmark_connections: 23 + landmark_connections: 24 + landmark_connections: 23 + landmark_connections: 25 + landmark_connections: 24 + landmark_connections: 26 + landmark_connections: 25 + landmark_connections: 27 + landmark_connections: 26 + landmark_connections: 28 + landmark_connections: 27 + landmark_connections: 29 + landmark_connections: 28 + landmark_connections: 30 + landmark_connections: 29 + landmark_connections: 31 + landmark_connections: 30 + landmark_connections: 32 + landmark_connections: 27 + landmark_connections: 31 + landmark_connections: 28 + landmark_connections: 32 + + landmark_color { r: 255 g: 255 b: 255 } + connection_color { r: 255 g: 255 b: 255 } + thickness: 3.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Take left pose landmarks. 
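An aside before the left/right split nodes: the flat `landmark_connections` list above pairs consecutive entries into the edges of BlazePose's 33-landmark topology. Transcribed into explicit Rust pairs (illustrative only, not a wrapper API):

```rust
/// BlazePose connection pairs, transcribed from the flat
/// `landmark_connections` list above (consecutive entries form one edge).
const POSE_CONNECTIONS: [(u32, u32); 35] = [
    (0, 1), (1, 2), (2, 3), (3, 7), (0, 4), (4, 5), (5, 6), (6, 8),
    (9, 10), (11, 12), (11, 13), (13, 15), (15, 17), (15, 19), (15, 21),
    (17, 19), (12, 14), (14, 16), (16, 18), (16, 20), (16, 22), (18, 20),
    (11, 23), (12, 24), (23, 24), (23, 25), (24, 26), (25, 27), (26, 28),
    (27, 29), (28, 30), (29, 31), (30, 32), (27, 31), (28, 32),
];

/// Rebuilds the pairs from a flat list, mirroring how
/// LandmarksToRenderDataCalculator consumes the repeated field.
fn pairs(flat: &[u32]) -> Vec<(u32, u32)> {
    flat.chunks_exact(2).map(|c| (c[0], c[1])).collect()
}
```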
+node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "landmarks_left_side" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 1 end: 4 } + ranges: { begin: 7 end: 8 } + ranges: { begin: 9 end: 10 } + ranges: { begin: 11 end: 12 } + ranges: { begin: 13 end: 14 } + ranges: { begin: 15 end: 16 } + ranges: { begin: 17 end: 18 } + ranges: { begin: 19 end: 20 } + ranges: { begin: 21 end: 22 } + ranges: { begin: 23 end: 24 } + + combine_outputs: true + } + } +} + +# Take right pose landmarks. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "landmarks_right_side" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 4 end: 7 } + ranges: { begin: 8 end: 9 } + ranges: { begin: 10 end: 11 } + ranges: { begin: 12 end: 13 } + ranges: { begin: 14 end: 15 } + ranges: { begin: 16 end: 17 } + ranges: { begin: 18 end: 19 } + ranges: { begin: 20 end: 21 } + ranges: { begin: 22 end: 23 } + ranges: { begin: 24 end: 25 } + + combine_outputs: true + } + } +} + +# Render pose joints as big white circles. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:visible_pose_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_background_joints_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 255 g: 255 b: 255 } + connection_color { r: 255 g: 255 b: 255 } + thickness: 5.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Render pose left side joints as orange circles (inside white ones). +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks_left_side" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_left_joints_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 255 g: 138 b: 0 } + connection_color { r: 255 g: 138 b: 0 } + thickness: 3.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Render pose right side joints as cyan circles (inside white ones). +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks_right_side" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_right_joints_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 0 g: 217 b: 231 } + connection_color { r: 0 g: 217 b: 231 } + thickness: 3.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:roi" + output_stream: "RENDER_DATA:roi_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +# Colors the segmentation mask with the color specified in the option. 
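One more aside before the recolor stage: the SplitNormalizedLandmarkListCalculator nodes above select landmark subsets as half-open `[begin, end)` ranges, and `combine_outputs: true` concatenates the selected slices into one list. The same selection expressed in Rust:

```rust
/// Expands SplitVectorCalculator-style half-open ranges into indices,
/// the way combine_outputs: true concatenates the selected slices.
fn expand(ranges: &[(usize, usize)]) -> Vec<usize> {
    ranges.iter().flat_map(|&(begin, end)| begin..end).collect()
}

fn main() {
    // Left-side ranges from the subgraph above.
    let left = expand(&[(1, 4), (7, 8), (9, 10), (11, 12), (13, 14),
                        (15, 16), (17, 18), (19, 20), (21, 22), (23, 24)]);
    assert_eq!(left, vec![1, 2, 3, 7, 9, 11, 13, 15, 17, 19, 21, 23]);
}
```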
+node { + calculator: "RecolorCalculator" + input_stream: "IMAGE:input_image" + input_stream: "MASK:segmentation_mask" + output_stream: "IMAGE:segmented_image" + node_options: { + [type.googleapis.com/mediapipe.RecolorCalculatorOptions] { + color { r: 0 g: 0 b: 255 } + mask_channel: RED + invert_mask: true + adjust_with_luminance: false + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:segmented_image" + input_stream: "detection_render_data" + input_stream: "landmarks_render_data" + input_stream: "landmarks_background_joints_render_data" + input_stream: "landmarks_left_joints_render_data" + input_stream: "landmarks_right_joints_render_data" + input_stream: "roi_render_data" + output_stream: "IMAGE:output_image" +} diff --git a/mediapipe/graphs/pose_tracking/subgraphs/pose_renderer_gpu.pbtxt b/mediapipe/graphs/pose_tracking/subgraphs/pose_renderer_gpu.pbtxt new file mode 100644 index 0000000..4d680c6 --- /dev/null +++ b/mediapipe/graphs/pose_tracking/subgraphs/pose_renderer_gpu.pbtxt @@ -0,0 +1,292 @@ +# MediaPipe pose landmarks rendering subgraph. + +type: "PoseRendererGpu" + +# GPU image. (GpuBuffer) +input_stream: "IMAGE:input_image" +# Pose landmarks. (NormalizedLandmarkList) +input_stream: "LANDMARKS:pose_landmarks" +# Segmentation mask. (GpuBuffer in RGBA, with the same mask values in R and A) +input_stream: "SEGMENTATION_MASK:segmentation_mask" +# Region of interest calculated based on landmarks. (NormalizedRect) +input_stream: "ROI:roi" +# Detected pose. (Detection) +input_stream: "DETECTION:detection" + +# GPU image with rendered data. (GpuBuffer) +output_stream: "IMAGE:output_image" + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:input_image" + output_stream: "SIZE:image_size" +} + +# Calculates rendering scale based on the pose roi. +node { + calculator: "RectToRenderScaleCalculator" + input_stream: "NORM_RECT:roi" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "RENDER_SCALE:render_scale" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderScaleCalculatorOptions] { + multiplier: 0.0012 + } + } +} + +# Converts detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTION:detection" + output_stream: "RENDER_DATA:detection_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 0 g: 255 b: 0 } + } + } +} + +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "visible_pose_landmarks" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 0 end: 25 } + } + } +} + +# Converts landmarks to drawing primitives for annotation overlay. 
+node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:pose_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_connections: 0 + landmark_connections: 1 + landmark_connections: 1 + landmark_connections: 2 + landmark_connections: 2 + landmark_connections: 3 + landmark_connections: 3 + landmark_connections: 7 + landmark_connections: 0 + landmark_connections: 4 + landmark_connections: 4 + landmark_connections: 5 + landmark_connections: 5 + landmark_connections: 6 + landmark_connections: 6 + landmark_connections: 8 + landmark_connections: 9 + landmark_connections: 10 + landmark_connections: 11 + landmark_connections: 12 + landmark_connections: 11 + landmark_connections: 13 + landmark_connections: 13 + landmark_connections: 15 + landmark_connections: 15 + landmark_connections: 17 + landmark_connections: 15 + landmark_connections: 19 + landmark_connections: 15 + landmark_connections: 21 + landmark_connections: 17 + landmark_connections: 19 + landmark_connections: 12 + landmark_connections: 14 + landmark_connections: 14 + landmark_connections: 16 + landmark_connections: 16 + landmark_connections: 18 + landmark_connections: 16 + landmark_connections: 20 + landmark_connections: 16 + landmark_connections: 22 + landmark_connections: 18 + landmark_connections: 20 + landmark_connections: 11 + landmark_connections: 23 + landmark_connections: 12 + landmark_connections: 24 + landmark_connections: 23 + landmark_connections: 24 + landmark_connections: 23 + landmark_connections: 25 + landmark_connections: 24 + landmark_connections: 26 + landmark_connections: 25 + landmark_connections: 27 + landmark_connections: 26 + landmark_connections: 28 + landmark_connections: 27 + landmark_connections: 29 + landmark_connections: 28 + landmark_connections: 30 + landmark_connections: 29 + landmark_connections: 31 + landmark_connections: 30 + landmark_connections: 32 + landmark_connections: 27 + landmark_connections: 31 + landmark_connections: 28 + landmark_connections: 32 + + landmark_color { r: 255 g: 255 b: 255 } + connection_color { r: 255 g: 255 b: 255 } + thickness: 3.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Take left pose landmarks. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "landmarks_left_side" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 1 end: 4 } + ranges: { begin: 7 end: 8 } + ranges: { begin: 9 end: 10 } + ranges: { begin: 11 end: 12 } + ranges: { begin: 13 end: 14 } + ranges: { begin: 15 end: 16 } + ranges: { begin: 17 end: 18 } + ranges: { begin: 19 end: 20 } + ranges: { begin: 21 end: 22 } + ranges: { begin: 23 end: 24 } + + combine_outputs: true + } + } +} + +# Take right pose landmarks. 
+node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "landmarks_right_side" + node_options: { + [type.googleapis.com/mediapipe.SplitVectorCalculatorOptions] { + ranges: { begin: 4 end: 7 } + ranges: { begin: 8 end: 9 } + ranges: { begin: 10 end: 11 } + ranges: { begin: 12 end: 13 } + ranges: { begin: 14 end: 15 } + ranges: { begin: 16 end: 17 } + ranges: { begin: 18 end: 19 } + ranges: { begin: 20 end: 21 } + ranges: { begin: 22 end: 23 } + ranges: { begin: 24 end: 25 } + + combine_outputs: true + } + } +} + +# Render pose joints as big white circles. +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:visible_pose_landmarks" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_background_joints_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 255 g: 255 b: 255 } + connection_color { r: 255 g: 255 b: 255 } + thickness: 5.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Render pose left side joints as orange circles (inside white ones). +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks_left_side" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_left_joints_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 255 g: 138 b: 0 } + connection_color { r: 255 g: 138 b: 0 } + thickness: 3.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Render pose right side joints as cyan circles (inside white ones). +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks_right_side" + input_stream: "RENDER_SCALE:render_scale" + output_stream: "RENDER_DATA:landmarks_right_joints_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 0 g: 217 b: 231 } + connection_color { r: 0 g: 217 b: 231 } + thickness: 3.0 + visualize_landmark_depth: false + utilize_visibility: true + visibility_threshold: 0.5 + } + } +} + +# Converts normalized rects to drawing primitives for annotation overlay. +node { + calculator: "RectToRenderDataCalculator" + input_stream: "NORM_RECT:roi" + output_stream: "RENDER_DATA:roi_render_data" + node_options: { + [type.googleapis.com/mediapipe.RectToRenderDataCalculatorOptions] { + filled: false + color { r: 255 g: 0 b: 0 } + thickness: 4.0 + } + } +} + +# Colors the segmentation mask with the color specified in the option. +node { + calculator: "RecolorCalculator" + input_stream: "IMAGE_GPU:input_image" + input_stream: "MASK_GPU:segmentation_mask" + output_stream: "IMAGE_GPU:segmented_image" + node_options: { + [type.googleapis.com/mediapipe.RecolorCalculatorOptions] { + color { r: 0 g: 0 b: 255 } + mask_channel: RED + invert_mask: true + adjust_with_luminance: false + } + } +} + +# Draws annotations and overlays them on top of the input images. 
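The RecolorCalculator just above (and its CPU twin in pose_renderer_cpu.pbtxt) tints one side of the segmentation mask; with `invert_mask: true` it is the background that gets the color. A deliberately crude CPU sketch of that configured behavior, assuming a CV_32FC1 mask as the CPU graph's VEC32F1 format implies; the real calculator also handles channel selection and luminance adjustment:

```rust
use opencv::{core::{Mat, Vec3b}, prelude::*, Result};

// Toy analogue of RecolorCalculator with color { b: 255 },
// mask_channel: RED, invert_mask: true: tint where the mask is LOW.
fn tint_background(frame: &mut Mat, mask: &Mat) -> Result<()> {
    for y in 0..frame.rows() {
        for x in 0..frame.cols() {
            // Assumes a single-channel f32 mask, ~1.0 on the person.
            let m = *mask.at_2d::<f32>(y, x)?;
            if m < 0.5 {
                let px = frame.at_2d_mut::<Vec3b>(y, x)?;
                px[0] = 255; // saturate the blue channel (Mat pixels are BGR)
            }
        }
    }
    Ok(())
}
```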
+node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE_GPU:segmented_image" + input_stream: "detection_render_data" + input_stream: "landmarks_render_data" + input_stream: "landmarks_background_joints_render_data" + input_stream: "landmarks_left_joints_render_data" + input_stream: "landmarks_right_joints_render_data" + input_stream: "roi_render_data" + output_stream: "IMAGE_GPU:output_image" +} diff --git a/mediapipe/graphs/selfie_segmentation/BUILD b/mediapipe/graphs/selfie_segmentation/BUILD new file mode 100644 index 0000000..ddca178 --- /dev/null +++ b/mediapipe/graphs/selfie_segmentation/BUILD @@ -0,0 +1,54 @@ +# Copyright 2021 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_binary_graph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "selfie_segmentation_gpu_deps", + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:recolor_calculator", + "//mediapipe/modules/selfie_segmentation:selfie_segmentation_gpu", + ], +) + +mediapipe_binary_graph( + name = "selfie_segmentation_gpu_binary_graph", + graph = "selfie_segmentation_gpu.pbtxt", + output_name = "selfie_segmentation_gpu.binarypb", + deps = [":selfie_segmentation_gpu_deps"], +) + +cc_library( + name = "selfie_segmentation_cpu_deps", + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:recolor_calculator", + "//mediapipe/modules/selfie_segmentation:selfie_segmentation_cpu", + ], +) + +mediapipe_binary_graph( + name = "selfie_segmentation_cpu_binary_graph", + graph = "selfie_segmentation_cpu.pbtxt", + output_name = "selfie_segmentation_cpu.binarypb", + deps = [":selfie_segmentation_cpu_deps"], +) diff --git a/mediapipe/graphs/selfie_segmentation/selfie_segmentation_cpu.pbtxt b/mediapipe/graphs/selfie_segmentation/selfie_segmentation_cpu.pbtxt new file mode 100644 index 0000000..db1b479 --- /dev/null +++ b/mediapipe/graphs/selfie_segmentation/selfie_segmentation_cpu.pbtxt @@ -0,0 +1,52 @@ +# MediaPipe graph that performs selfie segmentation with TensorFlow Lite on CPU. + +# CPU buffer. (ImageFrame) +input_stream: "input_video" + +# Output image with rendered results. (ImageFrame) +output_stream: "output_video" + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for downstream nodes +# (calculators and subgraphs) in the graph to finish their tasks before it +# passes through another image. All images that come in while waiting are +# dropped, limiting the number of in-flight images in most part of the graph to +# 1. This prevents the downstream nodes from queuing up incoming images and data +# excessively, which leads to increased latency and memory usage, unwanted in +# real-time mobile applications. 
It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Subgraph that performs selfie segmentation.
+node {
+  calculator: "SelfieSegmentationCpu"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "SEGMENTATION_MASK:segmentation_mask"
+}
+
+
+# Colors the selfie segmentation with the color specified in the option.
+node {
+  calculator: "RecolorCalculator"
+  input_stream: "IMAGE:throttled_input_video"
+  input_stream: "MASK:segmentation_mask"
+  output_stream: "IMAGE:output_video"
+  node_options: {
+    [type.googleapis.com/mediapipe.RecolorCalculatorOptions] {
+      color { r: 0 g: 0 b: 255 }
+      mask_channel: RED
+      invert_mask: true
+      adjust_with_luminance: false
+    }
+  }
+}
diff --git a/mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt b/mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt
new file mode 100644
index 0000000..08d4c36
--- /dev/null
+++ b/mediapipe/graphs/selfie_segmentation/selfie_segmentation_gpu.pbtxt
@@ -0,0 +1,52 @@
+# MediaPipe graph that performs selfie segmentation with TensorFlow Lite on GPU.
+
+# GPU buffer. (GpuBuffer)
+input_stream: "input_video"
+
+# Output image with rendered results. (GpuBuffer)
+output_stream: "output_video"
+
+# Throttles the images flowing downstream for flow control. It passes through
+# the very first incoming image unaltered, and waits for downstream nodes
+# (calculators and subgraphs) in the graph to finish their tasks before it
+# passes through another image. All images that come in while waiting are
+# dropped, limiting the number of in-flight images in most part of the graph to
+# 1. This prevents the downstream nodes from queuing up incoming images and data
+# excessively, which leads to increased latency and memory usage, unwanted in
+# real-time mobile applications. It also eliminates unnecessary computation,
+# e.g., the output produced by a node may get dropped downstream if the
+# subsequent nodes are still busy processing previous inputs.
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "input_video"
+  input_stream: "FINISHED:output_video"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_input_video"
+}
+
+# Subgraph that performs selfie segmentation.
+node {
+  calculator: "SelfieSegmentationGpu"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "SEGMENTATION_MASK:segmentation_mask"
+}
+
+
+# Colors the selfie segmentation with the color specified in the option.
+node {
+  calculator: "RecolorCalculator"
+  input_stream: "IMAGE_GPU:throttled_input_video"
+  input_stream: "MASK_GPU:segmentation_mask"
+  output_stream: "IMAGE_GPU:output_video"
+  node_options: {
+    [type.googleapis.com/mediapipe.RecolorCalculatorOptions] {
+      color { r: 0 g: 0 b: 255 }
+      mask_channel: RED
+      invert_mask: true
+      adjust_with_luminance: false
+    }
+  }
+}
diff --git a/mediapipe/graphs/template_matching/BUILD b/mediapipe/graphs/template_matching/BUILD
new file mode 100644
index 0000000..bc254d2
--- /dev/null
+++ b/mediapipe/graphs/template_matching/BUILD
@@ -0,0 +1,67 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+    "//mediapipe/framework/tool:mediapipe_graph.bzl",
+    "mediapipe_binary_graph",
+)
+
+licenses(["notice"])
+
+package(default_visibility = ["//visibility:public"])
+
+cc_library(
+    name = "template_matching_deps",
+    deps = [
+        "//mediapipe/calculators/image:feature_detector_calculator",
+        "//mediapipe/calculators/image:image_properties_calculator",
+        "//mediapipe/calculators/image:image_transformation_calculator",
+        "//mediapipe/calculators/tflite:tflite_converter_calculator",
+        "//mediapipe/calculators/tflite:tflite_inference_calculator",
+        "//mediapipe/calculators/tflite:tflite_tensors_to_floats_calculator",
+        "//mediapipe/calculators/util:annotation_overlay_calculator",
+        "//mediapipe/calculators/util:landmarks_to_render_data_calculator",
+        "//mediapipe/calculators/util:timed_box_list_id_to_label_calculator",
+        "//mediapipe/calculators/util:timed_box_list_to_render_data_calculator",
+        "//mediapipe/calculators/video:box_detector_calculator",
+    ],
+)
+
+cc_library(
+    name = "desktop_calculators",
+    deps = [
+        ":template_matching_deps",
+        "//mediapipe/calculators/image:opencv_encoded_image_to_image_frame_calculator",
+        "//mediapipe/calculators/util:local_file_pattern_contents_calculator",
+        "//mediapipe/calculators/video:opencv_video_decoder_calculator",
+        "//mediapipe/calculators/video:opencv_video_encoder_calculator",
+    ],
+)
+
+cc_library(
+    name = "mobile_calculators",
+    deps = [
+        ":template_matching_deps",
+        "//mediapipe/calculators/core:flow_limiter_calculator",
+        "//mediapipe/calculators/image:image_transformation_calculator",
+        "//mediapipe/gpu:gpu_buffer_to_image_frame_calculator",
+    ],
+)
+
+mediapipe_binary_graph(
+    name = "mobile_cpu_binary_graph",
+    graph = "template_matching_mobile_cpu.pbtxt",
+    output_name = "mobile_cpu.binarypb",
+    deps = [":mobile_calculators"],
+)
diff --git a/mediapipe/graphs/template_matching/index_building.pbtxt b/mediapipe/graphs/template_matching/index_building.pbtxt
new file mode 100644
index 0000000..8228139
--- /dev/null
+++ b/mediapipe/graphs/template_matching/index_building.pbtxt
@@ -0,0 +1,92 @@
+# MediaPipe graph that builds a feature-descriptor index for a specific target.
+
+# max_queue_size limits the number of packets enqueued on any input stream
+# by throttling inputs to the graph. This makes the graph process only one
+# frame at a time.
+max_queue_size: 1
+
+# Reads image files from the given directory into encoded image contents.
+node {
+  calculator: "LocalFilePatternContentsCalculator"
+  input_side_packet: "FILE_DIRECTORY:file_directory"
+  input_side_packet: "FILE_SUFFIX:file_suffix"
+  output_stream: "CONTENTS:encoded_image"
+}
+
+node {
+  calculator: "OpenCvEncodedImageToImageFrameCalculator"
+  input_stream: "encoded_image"
+  output_stream: "image_frame"
+}
+
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE:image_frame"
+  output_stream: "IMAGE:scaled_image_frame"
+  node_options: {
+    [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] {
+      output_width: 320
+      output_height: 320
+      scale_mode: FILL_AND_CROP
+    }
+  }
+}
+
+node {
+  calculator: "ImagePropertiesCalculator"
+  input_stream: "IMAGE:scaled_image_frame"
+  output_stream: "SIZE:input_video_size"
+}
+
+node {
+  calculator: "FeatureDetectorCalculator"
+  input_stream: "IMAGE:scaled_image_frame"
+  output_stream: "FEATURES:features"
+  output_stream: "LANDMARKS:landmarks"
+  output_stream: "PATCHES:patches"
+  node_options: {
+    [type.googleapis.com/mediapipe.FeatureDetectorCalculatorOptions] {
+      max_features: 400
+    }
+  }
+}
+
+# input tensors: 200*32*32*1 float
+# output tensors: 200*40 float, only first keypoint.size()*40 is knift features,
+# rest is padded by zero.
+node {
+  calculator: "TfLiteInferenceCalculator"
+  input_stream: "TENSORS:patches"
+  output_stream: "TENSORS:knift_feature_tensors"
+  input_stream_handler {
+    input_stream_handler: "DefaultInputStreamHandler"
+  }
+  node_options: {
+    [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] {
+      model_path: "mediapipe/models/knift_float_400.tflite"
+    }
+  }
+}
+
+node {
+  calculator: "TfLiteTensorsToFloatsCalculator"
+  input_stream: "TENSORS:knift_feature_tensors"
+  output_stream: "FLOATS:knift_feature_floats"
+}
+
+node {
+  calculator: "BoxDetectorCalculator"
+  input_side_packet: "OUTPUT_INDEX_FILENAME:output_index_filename"
+  input_stream: "FEATURES:features"
+  input_stream: "IMAGE_SIZE:input_video_size"
+  input_stream: "DESCRIPTORS:knift_feature_floats"
+
+  node_options: {
+    [type.googleapis.com/mediapipe.BoxDetectorCalculatorOptions] {
+      detector_options {
+        index_type: OPENCV_BF
+        detect_every_n_frame: 1
+      }
+    }
+  }
+}
diff --git a/mediapipe/graphs/template_matching/template_matching_desktop.pbtxt b/mediapipe/graphs/template_matching/template_matching_desktop.pbtxt
new file mode 100644
index 0000000..d44a7e5
--- /dev/null
+++ b/mediapipe/graphs/template_matching/template_matching_desktop.pbtxt
@@ -0,0 +1,141 @@
+# MediaPipe graph that performs object detection on desktop with TensorFlow Lite
+# on CPU.
+# Used in the example in
+# mediapipe/examples/desktop/template_matching:template_matching_tflite
+
+# max_queue_size limits the number of packets enqueued on any input stream
+# by throttling inputs to the graph. This makes the graph process only one
+# frame at a time.
+max_queue_size: 1
+
+# Decodes an input video file into images and a video header.
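Before the desktop graph's decoder node, a note on the ImageTransformationCalculator settings used throughout these files: FILL_AND_CROP scales the image to cover the target size while keeping aspect ratio, then crops the overflow. A Rust sketch with the opencv crate; the function name and the center-crop choice are assumptions, not the calculator's exact implementation:

```rust
use opencv::{core::{Mat, Rect, Size}, imgproc, prelude::*, Result};

/// Approximates FILL_AND_CROP: scale preserving aspect ratio until the
/// target (e.g. 320x320 above) is covered, then center-crop to exact size.
fn fill_and_crop(src: &Mat, ow: i32, oh: i32) -> Result<Mat> {
    let (w, h) = (src.cols(), src.rows());
    let scale = f64::max(ow as f64 / w as f64, oh as f64 / h as f64);
    let sw = (w as f64 * scale).round() as i32;
    let sh = (h as f64 * scale).round() as i32;

    let mut scaled = Mat::default();
    imgproc::resize(src, &mut scaled, Size::new(sw, sh), 0.0, 0.0,
                    imgproc::INTER_LINEAR)?;

    // Crop the centered ow x oh window and copy it out of the view.
    let roi = Rect::new((sw - ow) / 2, (sh - oh) / 2, ow, oh);
    Mat::roi(&scaled, roi)?.try_clone()
}
```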
+node { + calculator: "OpenCvVideoDecoderCalculator" + input_side_packet: "INPUT_FILE_PATH:input_video_path" + output_stream: "VIDEO:input_video" + output_stream: "VIDEO_PRESTREAM:input_video_header" +} + +node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE:input_video" + output_stream: "IMAGE:scaled_input_video" + node_options: { + [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] { + output_width: 640 + output_height: 640 + scale_mode: FILL_AND_CROP + } + } +} + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:scaled_input_video" + output_stream: "SIZE:input_video_size" +} + +node { + calculator: "FeatureDetectorCalculator" + input_stream: "IMAGE:scaled_input_video" + output_stream: "FEATURES:features" + output_stream: "LANDMARKS:landmarks" + output_stream: "PATCHES:patches" +} + +# input tensors: 200*32*32*1 float +# output tensors: 200*40 float, only first keypoint.size()*40 is knift features, +# rest is padded by zero. +node { + calculator: "TfLiteInferenceCalculator" + input_stream: "TENSORS:patches" + output_stream: "TENSORS:knift_feature_tensors" + node_options: { + [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] { + model_path: "mediapipe/models/knift_float.tflite" + } + } +} + +node { + calculator: "TfLiteTensorsToFloatsCalculator" + input_stream: "TENSORS:knift_feature_tensors" + output_stream: "FLOATS:knift_feature_floats" +} + +node { + calculator: "BoxDetectorCalculator" + input_stream: "FEATURES:features" + input_stream: "IMAGE_SIZE:input_video_size" + input_stream: "DESCRIPTORS:knift_feature_floats" + output_stream: "BOXES:detections" + + node_options: { + [type.googleapis.com/mediapipe.BoxDetectorCalculatorOptions] { + detector_options { + index_type: OPENCV_BF + detect_every_n_frame: 1 + } + index_proto_filename: "mediapipe/models/knift_index.pb" + } + } +} + +node { + calculator: "TimedBoxListIdToLabelCalculator" + input_stream: "detections" + output_stream: "labeled_detections" + node_options: { + [type.googleapis.com/mediapipe.TimedBoxListIdToLabelCalculatorOptions] { + label_map_path: "mediapipe/models/knift_labelmap.txt" + } + } +} + +node { + calculator: "TimedBoxListToRenderDataCalculator" + input_stream: "BOX_LIST:labeled_detections" + output_stream: "RENDER_DATA:box_render_data" + node_options: { + [type.googleapis.com/mediapipe.TimedBoxListToRenderDataCalculatorOptions] { + box_color { r: 255 g: 0 b: 0 } + thickness: 5.0 + } + } +} + +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks" + output_stream: "RENDER_DATA:landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 0 g: 255 b: 0 } + thickness: 2.0 + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_video" + input_stream: "box_render_data" + input_stream: "landmarks_render_data" + output_stream: "IMAGE:output_video" +} + +# Encodes the annotated images into a video file, adopting properties specified +# in the input video header, e.g., video framerate. 
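The encoder node below adopts the properties carried in the decoder's video header. The same decode-annotate-encode loop, stripped to its OpenCV essentials in Rust; the paths are placeholders and avc1 availability depends on the local OpenCV build:

```rust
use opencv::{core::{Mat, Size}, prelude::*, videoio, Result};

fn main() -> Result<()> {
    // Mirrors OpenCvVideoDecoderCalculator / OpenCvVideoEncoderCalculator:
    // read a file, then write frames back out as avc1-encoded mp4.
    let mut cap = videoio::VideoCapture::from_file("input.mp4", videoio::CAP_ANY)?;
    let fps = cap.get(videoio::CAP_PROP_FPS)?;
    let size = Size::new(
        cap.get(videoio::CAP_PROP_FRAME_WIDTH)? as i32,
        cap.get(videoio::CAP_PROP_FRAME_HEIGHT)? as i32,
    );
    let fourcc = videoio::VideoWriter::fourcc('a', 'v', 'c', '1')?;
    let mut writer = videoio::VideoWriter::new("output.mp4", fourcc, fps, size, true)?;

    let mut frame = Mat::default();
    while cap.read(&mut frame)? && !frame.empty() {
        // ...annotation overlay would happen here...
        writer.write(&frame)?;
    }
    Ok(())
}
```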
+node { + calculator: "OpenCvVideoEncoderCalculator" + input_stream: "VIDEO:output_video" + input_stream: "VIDEO_PRESTREAM:input_video_header" + input_side_packet: "OUTPUT_FILE_PATH:output_video_path" + node_options: { + [type.googleapis.com/mediapipe.OpenCvVideoEncoderCalculatorOptions]: { + codec: "avc1" + video_format: "mp4" + } + } +} diff --git a/mediapipe/graphs/template_matching/template_matching_mobile_cpu.pbtxt b/mediapipe/graphs/template_matching/template_matching_mobile_cpu.pbtxt new file mode 100644 index 0000000..e02e12d --- /dev/null +++ b/mediapipe/graphs/template_matching/template_matching_mobile_cpu.pbtxt @@ -0,0 +1,137 @@ +# MediaPipe graph that performs template matching with TensorFlow Lite on CPU. +# Used in the examples in +# mediapipe/examples/android/src/java/com/mediapipe/apps/templatematchingcpu + +# Images on GPU coming into and out of the graph. +input_stream: "input_video" +output_stream: "output_video" + +# Throttles the images flowing downstream for flow control. +node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:detections" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Transfers the input image from GPU to CPU memory. +node: { + calculator: "GpuBufferToImageFrameCalculator" + input_stream: "throttled_input_video" + output_stream: "input_video_cpu" +} + +# Scale the image's longer side to 640, keeping aspect ratio. +node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE:input_video_cpu" + output_stream: "IMAGE:transformed_input_video_cpu" + node_options: { + [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] { + output_width: 640 + output_height: 640 + scale_mode: FILL_AND_CROP + } + } +} + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:transformed_input_video_cpu" + output_stream: "SIZE:input_video_size" +} + +node { + calculator: "FeatureDetectorCalculator" + input_stream: "IMAGE:transformed_input_video_cpu" + output_stream: "FEATURES:features" + output_stream: "LANDMARKS:landmarks" + output_stream: "PATCHES:patches" +} + +# input tensors: 200*32*32*1 float +# output tensors: 200*40 float, only first keypoint.size()*40 is knift features, +# rest is padded by zero. 
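The comment above is worth unpacking before the inference node: the KNIFT model always emits a 200x40 float tensor, but only the first keypoints.size() rows are real descriptors and the remainder is zero padding. Slicing off the valid rows from the flattened output in Rust (the function is a hypothetical helper):

```rust
/// Only the first `num_keypoints * 40` floats of the 200x40 output are
/// valid KNIFT descriptors; the rest is zero padding.
fn knift_descriptors(flat: &[f32], num_keypoints: usize) -> Vec<&[f32]> {
    flat[..num_keypoints * 40].chunks_exact(40).collect()
}

// e.g. with 57 detected keypoints:
// let rows = knift_descriptors(&output_floats, 57); // 57 slices of length 40
```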
+node { + calculator: "TfLiteInferenceCalculator" + input_stream: "TENSORS:patches" + output_stream: "TENSORS:knift_feature_tensors" + node_options: { + [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] { + model_path: "mediapipe/models/knift_float.tflite" + delegate { xnnpack {} } + } + } +} + +node { + calculator: "TfLiteTensorsToFloatsCalculator" + input_stream: "TENSORS:knift_feature_tensors" + output_stream: "FLOATS:knift_feature_floats" +} + +node { + calculator: "BoxDetectorCalculator" + input_stream: "FEATURES:features" + input_stream: "IMAGE_SIZE:input_video_size" + input_stream: "DESCRIPTORS:knift_feature_floats" + output_stream: "BOXES:detections" + + node_options: { + [type.googleapis.com/mediapipe.BoxDetectorCalculatorOptions] { + detector_options { + index_type: OPENCV_BF + detect_every_n_frame: 1 + } + index_proto_filename: "mediapipe/models/knift_index.pb" + } + } +} + +node { + calculator: "TimedBoxListIdToLabelCalculator" + input_stream: "detections" + output_stream: "labeled_detections" + node_options: { + [type.googleapis.com/mediapipe.TimedBoxListIdToLabelCalculatorOptions] { + label_map_path: "mediapipe/models/knift_labelmap.txt" + } + } +} + +node { + calculator: "TimedBoxListToRenderDataCalculator" + input_stream: "BOX_LIST:labeled_detections" + output_stream: "RENDER_DATA:box_render_data" + node_options: { + [type.googleapis.com/mediapipe.TimedBoxListToRenderDataCalculatorOptions] { + box_color { r: 255 g: 0 b: 0 } + thickness: 5.0 + } + } +} + +node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:landmarks" + output_stream: "RENDER_DATA:landmarks_render_data" + node_options: { + [type.googleapis.com/mediapipe.LandmarksToRenderDataCalculatorOptions] { + landmark_color { r: 0 g: 255 b: 0 } + thickness: 2.0 + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE_GPU:throttled_input_video" + input_stream: "box_render_data" + input_stream: "landmarks_render_data" + output_stream: "IMAGE_GPU:output_video" +} diff --git a/mediapipe/graphs/tracking/BUILD b/mediapipe/graphs/tracking/BUILD new file mode 100644 index 0000000..9e6e75f --- /dev/null +++ b/mediapipe/graphs/tracking/BUILD @@ -0,0 +1,49 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+load(
+    "//mediapipe/framework/tool:mediapipe_graph.bzl",
+    "mediapipe_binary_graph",
+)
+
+licenses(["notice"])
+
+package(default_visibility = ["//visibility:public"])
+
+cc_library(
+    name = "mobile_calculators",
+    deps = [
+        "//mediapipe/calculators/core:packet_resampler_calculator",
+        "//mediapipe/graphs/tracking/subgraphs:object_detection_gpu",
+        "//mediapipe/graphs/tracking/subgraphs:object_tracking_gpu",
+        "//mediapipe/graphs/tracking/subgraphs:renderer_gpu",
+    ],
+)
+
+cc_library(
+    name = "desktop_calculators",
+    deps = [
+        "//mediapipe/calculators/core:packet_resampler_calculator",
+        "//mediapipe/graphs/tracking/subgraphs:object_detection_cpu",
+        "//mediapipe/graphs/tracking/subgraphs:object_tracking_cpu",
+        "//mediapipe/graphs/tracking/subgraphs:renderer_cpu",
+    ],
+)
+
+mediapipe_binary_graph(
+    name = "mobile_gpu_binary_graph",
+    graph = "object_detection_tracking_mobile_gpu.pbtxt",
+    output_name = "mobile_gpu.binarypb",
+    deps = [":mobile_calculators"],
+)
diff --git a/mediapipe/graphs/tracking/object_detection_tracking_desktop_live.pbtxt b/mediapipe/graphs/tracking/object_detection_tracking_desktop_live.pbtxt
new file mode 100644
index 0000000..4b21ee5
--- /dev/null
+++ b/mediapipe/graphs/tracking/object_detection_tracking_desktop_live.pbtxt
@@ -0,0 +1,45 @@
+# MediaPipe graph that performs object detection and tracking with TensorFlow
+# Lite on CPU.
+# Used in the examples in
+# mediapipe/examples/desktop/object_tracking/
+
+# Images on CPU coming into and out of the graph.
+input_stream: "input_video"
+output_stream: "output_video"
+
+# Resamples the images to a specific frame rate. This calculator is used to
+# control the frequency of subsequent calculators/subgraphs, e.g. less power
+# consumption for expensive processing.
+node {
+  calculator: "PacketResamplerCalculator"
+  input_stream: "DATA:input_video"
+  output_stream: "DATA:throttled_input_video"
+  node_options: {
+    [type.googleapis.com/mediapipe.PacketResamplerCalculatorOptions] {
+      frame_rate: 3
+    }
+  }
+}
+
+# Subgraph that detects objects (see object_detection_cpu.pbtxt).
+node {
+  calculator: "ObjectDetectionSubgraphCpu"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "DETECTIONS:output_detections"
+}
+
+# Subgraph that tracks objects (see object_tracking_cpu.pbtxt).
+node {
+  calculator: "ObjectTrackingSubgraphCpu"
+  input_stream: "VIDEO:input_video"
+  input_stream: "DETECTIONS:output_detections"
+  output_stream: "DETECTIONS:tracked_detections"
+}
+
+# Subgraph that renders annotations and overlays them on top of input images (see renderer_cpu.pbtxt).
+node {
+  calculator: "RendererSubgraphCpu"
+  input_stream: "IMAGE:input_video"
+  input_stream: "DETECTIONS:tracked_detections"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt b/mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt
new file mode 100644
index 0000000..0ef9830
--- /dev/null
+++ b/mediapipe/graphs/tracking/object_detection_tracking_mobile_gpu.pbtxt
@@ -0,0 +1,46 @@
+# MediaPipe graph that performs object detection and tracking with TensorFlow
+# Lite on GPU.
+# Used in the examples in
+# mediapipe/examples/android/src/java/com/mediapipe/apps/objecttrackinggpu
+
+# Images on GPU coming into and out of the graph.
+input_stream: "input_video"
+output_stream: "output_video"
+
+# Resamples the images to a specific frame rate. This calculator is used to
+# control the frequency of subsequent calculators/subgraphs, e.g.
less power
+# consumption for expensive processing.
+node {
+  calculator: "PacketResamplerCalculator"
+  input_stream: "DATA:input_video"
+  output_stream: "DATA:throttled_input_video"
+  node_options: {
+    [type.googleapis.com/mediapipe.PacketResamplerCalculatorOptions] {
+      frame_rate: 0.5
+    }
+  }
+}
+
+# Subgraph that detects objects (see object_detection_gpu.pbtxt).
+node {
+  calculator: "ObjectDetectionSubgraphGpu"
+  input_stream: "IMAGE:throttled_input_video"
+  output_stream: "DETECTIONS:output_detections"
+}
+
+# Subgraph that tracks objects (see object_tracking_gpu.pbtxt).
+node {
+  calculator: "ObjectTrackingSubgraphGpu"
+  input_stream: "VIDEO:input_video"
+  input_stream: "DETECTIONS:output_detections"
+  output_stream: "DETECTIONS:tracked_detections"
+}
+
+# Subgraph that renders annotations and overlays them on top of the input
+# images (see renderer_gpu.pbtxt).
+node {
+  calculator: "RendererSubgraphGpu"
+  input_stream: "IMAGE:input_video"
+  input_stream: "DETECTIONS:tracked_detections"
+  output_stream: "IMAGE:output_video"
+}
diff --git a/mediapipe/graphs/tracking/subgraphs/BUILD b/mediapipe/graphs/tracking/subgraphs/BUILD
new file mode 100644
index 0000000..16f87f3
--- /dev/null
+++ b/mediapipe/graphs/tracking/subgraphs/BUILD
@@ -0,0 +1,129 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
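Stepping back to the PacketResamplerCalculator both tracking graphs start with (frame_rate: 3 on desktop, 0.5 on mobile): its effect can be approximated by a simple rate gate. The real calculator works on packet timestamps and handles jitter; this wall-clock toy only illustrates the policy:

```rust
use std::time::{Duration, Instant};

/// Toy stand-in for PacketResamplerCalculator: keep a frame only when at
/// least 1/frame_rate seconds have passed since the last kept frame.
struct Resampler {
    period: Duration,
    last: Option<Instant>,
}

impl Resampler {
    fn new(frame_rate: f64) -> Self {
        Self { period: Duration::from_secs_f64(1.0 / frame_rate), last: None }
    }

    /// Returns true if the frame observed at `now` should be forwarded.
    fn keep(&mut self, now: Instant) -> bool {
        match self.last {
            Some(prev) if now.duration_since(prev) < self.period => false,
            _ => {
                self.last = Some(now);
                true
            }
        }
    }
}

fn main() {
    let mut resampler = Resampler::new(3.0); // desktop graph's frame_rate: 3
    for _ in 0..10 {
        if resampler.keep(Instant::now()) {
            // ...run the expensive detection subgraph on this frame...
        }
        std::thread::sleep(Duration::from_millis(100)); // ~10 fps camera
    }
}
```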
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "box_tracking_gpu", + graph = "box_tracking_gpu.pbtxt", + register_as = "BoxTrackingSubgraphGpu", + deps = [ + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/video:box_tracker_calculator", + "//mediapipe/calculators/video:flow_packager_calculator", + "//mediapipe/calculators/video:motion_analysis_calculator", + "//mediapipe/framework/stream_handler:immediate_input_stream_handler", + "//mediapipe/framework/stream_handler:sync_set_input_stream_handler", + "//mediapipe/gpu:gpu_buffer_to_image_frame_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "box_tracking_cpu", + graph = "box_tracking_cpu.pbtxt", + register_as = "BoxTrackingSubgraphCpu", + deps = [ + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/video:box_tracker_calculator", + "//mediapipe/calculators/video:flow_packager_calculator", + "//mediapipe/calculators/video:motion_analysis_calculator", + "//mediapipe/framework/stream_handler:immediate_input_stream_handler", + "//mediapipe/framework/stream_handler:sync_set_input_stream_handler", + ], +) + +mediapipe_simple_subgraph( + name = "object_tracking_gpu", + graph = "object_tracking_gpu.pbtxt", + register_as = "ObjectTrackingSubgraphGpu", + deps = [ + "//mediapipe/calculators/util:detection_unique_id_calculator", + "//mediapipe/calculators/util:detections_to_timed_box_list_calculator", + "//mediapipe/calculators/video:tracked_detection_manager_calculator", + "//mediapipe/framework/stream_handler:sync_set_input_stream_handler", + "//mediapipe/graphs/tracking/subgraphs:box_tracking_gpu", + ], +) + +mediapipe_simple_subgraph( + name = "object_tracking_cpu", + graph = "object_tracking_cpu.pbtxt", + register_as = "ObjectTrackingSubgraphCpu", + deps = [ + "//mediapipe/calculators/util:detection_unique_id_calculator", + "//mediapipe/calculators/util:detections_to_timed_box_list_calculator", + "//mediapipe/calculators/video:tracked_detection_manager_calculator", + "//mediapipe/framework/stream_handler:sync_set_input_stream_handler", + "//mediapipe/graphs/tracking/subgraphs:box_tracking_cpu", + ], +) + +mediapipe_simple_subgraph( + name = "object_detection_gpu", + graph = "object_detection_gpu.pbtxt", + register_as = "ObjectDetectionSubgraphGpu", + deps = [ + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/tflite:tflite_converter_calculator", + "//mediapipe/calculators/tflite:tflite_inference_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_detections_calculator", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "object_detection_cpu", + graph = "object_detection_cpu.pbtxt", + register_as = "ObjectDetectionSubgraphCpu", + deps = [ + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/tflite:tflite_converter_calculator", + "//mediapipe/calculators/tflite:tflite_inference_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_detections_calculator", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator", + 
"//mediapipe/calculators/util:non_max_suppression_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "renderer_gpu", + graph = "renderer_gpu.pbtxt", + register_as = "RendererSubgraphGpu", + deps = [ + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_data_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "renderer_cpu", + graph = "renderer_cpu.pbtxt", + register_as = "RendererSubgraphCpu", + deps = [ + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:detections_to_render_data_calculator", + "//mediapipe/calculators/util:rect_to_render_data_calculator", + ], +) diff --git a/mediapipe/graphs/tracking/subgraphs/box_tracking_cpu.pbtxt b/mediapipe/graphs/tracking/subgraphs/box_tracking_cpu.pbtxt new file mode 100644 index 0000000..b8c4e2f --- /dev/null +++ b/mediapipe/graphs/tracking/subgraphs/box_tracking_cpu.pbtxt @@ -0,0 +1,119 @@ +# MediaPipe box tracking subgraph. + +type: "BoxTrackingSubgraphCpu" + +input_stream: "VIDEO:input_video" +input_stream: "BOXES:start_pos" +input_stream: "CANCEL_ID:cancel_object_id" +output_stream: "BOXES:boxes" + +node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE:input_video" + output_stream: "IMAGE:downscaled_input_video" + node_options: { + [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] { + output_width: 320 + output_height: 240 + } + } +} + +# Performs motion analysis on an incoming video stream. +node: { + calculator: "MotionAnalysisCalculator" + input_stream: "VIDEO:downscaled_input_video" + output_stream: "CAMERA:camera_motion" + output_stream: "FLOW:region_flow" + + node_options: { + [type.googleapis.com/mediapipe.MotionAnalysisCalculatorOptions]: { + analysis_options { + analysis_policy: ANALYSIS_POLICY_CAMERA_MOBILE + flow_options { + fast_estimation_min_block_size: 100 + top_inlier_sets: 1 + frac_inlier_error_threshold: 3e-3 + downsample_mode: DOWNSAMPLE_TO_INPUT_SIZE + verification_distance: 5.0 + verify_long_feature_acceleration: true + verify_long_feature_trigger_ratio: 0.1 + tracking_options { + max_features: 500 + adaptive_extraction_levels: 2 + min_eig_val_settings { + adaptive_lowest_quality_level: 2e-4 + } + klt_tracker_implementation: KLT_OPENCV + } + } + } + } + } +} + +# Reads optical flow fields defined in +# mediapipe/framework/formats/motion/optical_flow_field.h, +# returns a VideoFrame with 2 channels (v_x and v_y), each channel is quantized +# to 0-255. +node: { + calculator: "FlowPackagerCalculator" + input_stream: "FLOW:region_flow" + input_stream: "CAMERA:camera_motion" + output_stream: "TRACKING:tracking_data" + + node_options: { + [type.googleapis.com/mediapipe.FlowPackagerCalculatorOptions]: { + flow_packager_options: { + binary_tracking_data_support: false + } + } + } +} + +# Tracks box positions over time. 
+node: { + calculator: "BoxTrackerCalculator" + input_stream: "TRACKING:tracking_data" + input_stream: "TRACK_TIME:input_video" + input_stream: "START_POS:start_pos" + input_stream: "CANCEL_OBJECT_ID:cancel_object_id" + input_stream_info: { + tag_index: "CANCEL_OBJECT_ID" + back_edge: true + } + output_stream: "BOXES:boxes" + + input_stream_handler { + input_stream_handler: "SyncSetInputStreamHandler" + options { + [mediapipe.SyncSetInputStreamHandlerOptions.ext] { + sync_set { + tag_index: "TRACKING" + tag_index: "TRACK_TIME" + } + sync_set { + tag_index: "START_POS" + } + sync_set { + tag_index: "CANCEL_OBJECT_ID" + } + } + } + } + + node_options: { + [type.googleapis.com/mediapipe.BoxTrackerCalculatorOptions]: { + tracker_options: { + track_step_options { + track_object_and_camera: true + tracking_degrees: TRACKING_DEGREE_OBJECT_SCALE + inlier_spring_force: 0.0 + static_motion_temporal_ratio: 3e-2 + } + } + visualize_tracking_data: false + streaming_track_data_cache_size: 100 + } + } +} diff --git a/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt b/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt new file mode 100644 index 0000000..cab2b77 --- /dev/null +++ b/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt @@ -0,0 +1,126 @@ +# MediaPipe box tracking subgraph. + +type: "BoxTrackingSubgraphGpu" + +input_stream: "VIDEO:input_video" +input_stream: "BOXES:start_pos" +input_stream: "CANCEL_ID:cancel_object_id" +output_stream: "BOXES:boxes" + +node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE_GPU:input_video" + output_stream: "IMAGE_GPU:downscaled_input_video" + node_options: { + [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] { + output_width: 240 + output_height: 320 + } + } +} + +# Converts GPU buffer to ImageFrame for processing tracking. +node: { + calculator: "GpuBufferToImageFrameCalculator" + input_stream: "downscaled_input_video" + output_stream: "downscaled_input_video_cpu" +} + +# Performs motion analysis on an incoming video stream. +node: { + calculator: "MotionAnalysisCalculator" + input_stream: "VIDEO:downscaled_input_video_cpu" + output_stream: "CAMERA:camera_motion" + output_stream: "FLOW:region_flow" + + node_options: { + [type.googleapis.com/mediapipe.MotionAnalysisCalculatorOptions]: { + analysis_options { + analysis_policy: ANALYSIS_POLICY_CAMERA_MOBILE + flow_options { + fast_estimation_min_block_size: 100 + top_inlier_sets: 1 + frac_inlier_error_threshold: 3e-3 + downsample_mode: DOWNSAMPLE_TO_INPUT_SIZE + verification_distance: 5.0 + verify_long_feature_acceleration: true + verify_long_feature_trigger_ratio: 0.1 + tracking_options { + max_features: 500 + adaptive_extraction_levels: 2 + min_eig_val_settings { + adaptive_lowest_quality_level: 2e-4 + } + klt_tracker_implementation: KLT_OPENCV + } + } + } + } + } +} + +# Reads optical flow fields defined in +# mediapipe/framework/formats/motion/optical_flow_field.h, +# returns a VideoFrame with 2 channels (v_x and v_y), each channel is quantized +# to 0-255. +node: { + calculator: "FlowPackagerCalculator" + input_stream: "FLOW:region_flow" + input_stream: "CAMERA:camera_motion" + output_stream: "TRACKING:tracking_data" + + node_options: { + [type.googleapis.com/mediapipe.FlowPackagerCalculatorOptions]: { + flow_packager_options: { + binary_tracking_data_support: false + } + } + } +} + +# Tracks box positions over time. 
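Tracking boxes over time is what the node below does, driven by the packaged camera-motion data. Purely to give a feel for the shape of a per-frame box update, here is a tiny exponential smoother over normalized boxes; this is emphatically not MediaPipe's motion-analysis tracker, just an illustrative toy:

```rust
/// Normalized box in [0, 1] coordinates; a toy counterpart of TimedBox.
#[derive(Clone, Copy)]
struct NormBox { x: f32, y: f32, w: f32, h: f32 }

/// Blend the previous box toward a new observation; alpha near 1.0
/// trusts the new detection, alpha near 0.0 trusts the track.
fn smooth(prev: NormBox, new: NormBox, alpha: f32) -> NormBox {
    let lerp = |a: f32, b: f32| a + alpha * (b - a);
    NormBox {
        x: lerp(prev.x, new.x),
        y: lerp(prev.y, new.y),
        w: lerp(prev.w, new.w),
        h: lerp(prev.h, new.h),
    }
}
```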
+node: { + calculator: "BoxTrackerCalculator" + input_stream: "TRACKING:tracking_data" + input_stream: "TRACK_TIME:input_video" + input_stream: "START_POS:start_pos" + input_stream: "CANCEL_OBJECT_ID:cancel_object_id" + input_stream_info: { + tag_index: "CANCEL_OBJECT_ID" + back_edge: true + } + output_stream: "BOXES:boxes" + + input_stream_handler { + input_stream_handler: "SyncSetInputStreamHandler" + options { + [mediapipe.SyncSetInputStreamHandlerOptions.ext] { + sync_set { + tag_index: "TRACKING" + tag_index: "TRACK_TIME" + } + sync_set { + tag_index: "START_POS" + } + sync_set { + tag_index: "CANCEL_OBJECT_ID" + } + } + } + } + + node_options: { + [type.googleapis.com/mediapipe.BoxTrackerCalculatorOptions]: { + tracker_options: { + track_step_options { + track_object_and_camera: true + tracking_degrees: TRACKING_DEGREE_OBJECT_SCALE + inlier_spring_force: 0.0 + static_motion_temporal_ratio: 3e-2 + } + } + visualize_tracking_data: false + streaming_track_data_cache_size: 100 + } + } +} diff --git a/mediapipe/graphs/tracking/subgraphs/object_detection_cpu.pbtxt b/mediapipe/graphs/tracking/subgraphs/object_detection_cpu.pbtxt new file mode 100644 index 0000000..54d6af3 --- /dev/null +++ b/mediapipe/graphs/tracking/subgraphs/object_detection_cpu.pbtxt @@ -0,0 +1,128 @@ +# MediaPipe object detection subgraph. + +type: "ObjectDetectionSubgraphCpu" + +input_stream: "IMAGE:input_video" +output_stream: "DETECTIONS:output_detections" + +# Transforms the input image on CPU to a 320x320 image. To scale the image, by +# default it uses the STRETCH scale mode that maps the entire input image to the +# entire transformed image. As a result, image aspect ratio may be changed and +# objects in the image may be deformed (stretched or squeezed), but the object +# detection model used in this graph is agnostic to that deformation. +node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE:input_video" + output_stream: "IMAGE:transformed_input_video" + node_options: { + [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] { + output_width: 320 + output_height: 320 + } + } +} + +# Converts the transformed input image on CPU into an image tensor stored as a +# TfLiteTensor. +node { + calculator: "TfLiteConverterCalculator" + input_stream: "IMAGE:transformed_input_video" + output_stream: "TENSORS:image_tensor" +} + +# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "TfLiteInferenceCalculator" + input_stream: "TENSORS:image_tensor" + output_stream: "TENSORS:detection_tensors" + node_options: { + [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] { + model_path: "mediapipe/models/ssdlite_object_detection.tflite" + } + } +} + +# Generates a single side packet containing a vector of SSD anchors based on +# the specification in the options. 
+node { + calculator: "SsdAnchorsCalculator" + output_side_packet: "anchors" + node_options: { + [type.googleapis.com/mediapipe.SsdAnchorsCalculatorOptions] { + num_layers: 6 + min_scale: 0.2 + max_scale: 0.95 + input_size_height: 320 + input_size_width: 320 + anchor_offset_x: 0.5 + anchor_offset_y: 0.5 + strides: 16 + strides: 32 + strides: 64 + strides: 128 + strides: 256 + strides: 512 + aspect_ratios: 1.0 + aspect_ratios: 2.0 + aspect_ratios: 0.5 + aspect_ratios: 3.0 + aspect_ratios: 0.3333 + reduce_boxes_in_lowest_layer: true + } + } +} + +# Decodes the detection tensors generated by the TensorFlow Lite model, based on +# the SSD anchors and the specification in the options, into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "TfLiteTensorsToDetectionsCalculator" + input_stream: "TENSORS:detection_tensors" + input_side_packet: "ANCHORS:anchors" + output_stream: "DETECTIONS:detections" + node_options: { + [type.googleapis.com/mediapipe.TfLiteTensorsToDetectionsCalculatorOptions] { + num_classes: 91 + num_boxes: 2034 + num_coords: 4 + ignore_classes: 0 + sigmoid_score: true + apply_exponential_on_box_size: true + x_scale: 10.0 + y_scale: 10.0 + h_scale: 5.0 + w_scale: 5.0 + min_score_thresh: 0.6 + } + } +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "detections" + output_stream: "filtered_detections" + node_options: { + [type.googleapis.com/mediapipe.NonMaxSuppressionCalculatorOptions] { + min_suppression_threshold: 0.4 + max_num_detections: 3 + overlap_type: INTERSECTION_OVER_UNION + return_empty_detections: true + } + } +} + +# Maps detection label IDs to the corresponding label text. The label map is +# provided in the label_map_path option. +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "filtered_detections" + output_stream: "output_detections" + node_options: { + [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] { + label_map_path: "mediapipe/models/ssdlite_object_detection_labelmap.txt" + } + } +} diff --git a/mediapipe/graphs/tracking/subgraphs/object_detection_gpu.pbtxt b/mediapipe/graphs/tracking/subgraphs/object_detection_gpu.pbtxt new file mode 100644 index 0000000..f3cc2c8 --- /dev/null +++ b/mediapipe/graphs/tracking/subgraphs/object_detection_gpu.pbtxt @@ -0,0 +1,128 @@ +# MediaPipe object detection subgraph. + +type: "ObjectDetectionSubgraphGpu" + +input_stream: "IMAGE:input_video" +output_stream: "DETECTIONS:output_detections" + +# Transforms the input image on GPU to a 320x320 image. To scale the image, by +# default it uses the STRETCH scale mode that maps the entire input image to the +# entire transformed image. As a result, image aspect ratio may be changed and +# objects in the image may be deformed (stretched or squeezed), but the object +# detection model used in this graph is agnostic to that deformation. +node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE_GPU:input_video" + output_stream: "IMAGE_GPU:transformed_input_video" + node_options: { + [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] { + output_width: 320 + output_height: 320 + } + } +} + +# Converts the transformed input image on GPU into an image tensor stored as a +# TfLiteTensor. 
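+# Note: the TENSORS_GPU tags below keep the model input and output in GPU
+# memory between the converter and the inference calculator, so no GPU-to-CPU
+# copy happens until the detection tensors are decoded into Detection protos.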
+node {
+  calculator: "TfLiteConverterCalculator"
+  input_stream: "IMAGE_GPU:transformed_input_video"
+  output_stream: "TENSORS_GPU:image_tensor"
+}
+
+# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "TfLiteInferenceCalculator"
+  input_stream: "TENSORS_GPU:image_tensor"
+  output_stream: "TENSORS_GPU:detection_tensors"
+  node_options: {
+    [type.googleapis.com/mediapipe.TfLiteInferenceCalculatorOptions] {
+      model_path: "mediapipe/models/ssdlite_object_detection.tflite"
+    }
+  }
+}
+
+# Generates a single side packet containing a vector of SSD anchors based on
+# the specification in the options.
+node {
+  calculator: "SsdAnchorsCalculator"
+  output_side_packet: "anchors"
+  node_options: {
+    [type.googleapis.com/mediapipe.SsdAnchorsCalculatorOptions] {
+      num_layers: 6
+      min_scale: 0.2
+      max_scale: 0.95
+      input_size_height: 320
+      input_size_width: 320
+      anchor_offset_x: 0.5
+      anchor_offset_y: 0.5
+      strides: 16
+      strides: 32
+      strides: 64
+      strides: 128
+      strides: 256
+      strides: 512
+      aspect_ratios: 1.0
+      aspect_ratios: 2.0
+      aspect_ratios: 0.5
+      aspect_ratios: 3.0
+      aspect_ratios: 0.3333
+      reduce_boxes_in_lowest_layer: true
+    }
+  }
+}
+
+# Decodes the detection tensors generated by the TensorFlow Lite model, based on
+# the SSD anchors and the specification in the options, into a vector of
+# detections. Each detection describes a detected object.
+node {
+  calculator: "TfLiteTensorsToDetectionsCalculator"
+  input_stream: "TENSORS_GPU:detection_tensors"
+  input_side_packet: "ANCHORS:anchors"
+  output_stream: "DETECTIONS:detections"
+  node_options: {
+    [type.googleapis.com/mediapipe.TfLiteTensorsToDetectionsCalculatorOptions] {
+      num_classes: 91
+      num_boxes: 2034
+      num_coords: 4
+      ignore_classes: 0
+      sigmoid_score: true
+      apply_exponential_on_box_size: true
+      x_scale: 10.0
+      y_scale: 10.0
+      h_scale: 5.0
+      w_scale: 5.0
+      min_score_thresh: 0.6
+    }
+  }
+}
+
+# Performs non-max suppression to remove excessive detections.
+node {
+  calculator: "NonMaxSuppressionCalculator"
+  input_stream: "detections"
+  output_stream: "filtered_detections"
+  node_options: {
+    [type.googleapis.com/mediapipe.NonMaxSuppressionCalculatorOptions] {
+      min_suppression_threshold: 0.4
+      max_num_detections: 3
+      overlap_type: INTERSECTION_OVER_UNION
+      return_empty_detections: true
+    }
+  }
+}
+
+# Maps detection label IDs to the corresponding label text. The label map is
+# provided in the label_map_path option.
+node {
+  calculator: "DetectionLabelIdToTextCalculator"
+  input_stream: "filtered_detections"
+  output_stream: "output_detections"
+  node_options: {
+    [type.googleapis.com/mediapipe.DetectionLabelIdToTextCalculatorOptions] {
+      label_map_path: "mediapipe/models/ssdlite_object_detection_labelmap.txt"
+    }
+  }
+}
diff --git a/mediapipe/graphs/tracking/subgraphs/object_tracking_cpu.pbtxt b/mediapipe/graphs/tracking/subgraphs/object_tracking_cpu.pbtxt
new file mode 100644
index 0000000..9ac7978
--- /dev/null
+++ b/mediapipe/graphs/tracking/subgraphs/object_tracking_cpu.pbtxt
@@ -0,0 +1,56 @@
+# MediaPipe object tracking subgraph.
+
+type: "ObjectTrackingSubgraphCpu"
+
+input_stream: "VIDEO:input_video"
+input_stream: "DETECTIONS:new_detections"
+output_stream: "DETECTIONS:tracked_detections"
+
+# Assigns a unique id to each new detection.
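+# Overview of the loop below: detections receive unique ids, are converted to
+# timed boxes that seed the box tracker, and the tracked-detection manager
+# then reconciles tracker output against fresh detections, emitting cancel ids
+# back into the tracking subgraph for boxes that should be dropped.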
+node {
+  calculator: "DetectionUniqueIdCalculator"
+  input_stream: "DETECTIONS:new_detections"
+  output_stream: "DETECTIONS:detections_with_id"
+}
+
+# Converts detections to TimedBox protos which are used as initial location
+# for tracking.
+node {
+  calculator: "DetectionsToTimedBoxListCalculator"
+  input_stream: "DETECTIONS:detections_with_id"
+  output_stream: "BOXES:start_pos"
+}
+
+# Subgraph that tracks boxes (see box_tracking_cpu.pbtxt).
+node {
+  calculator: "BoxTrackingSubgraphCpu"
+  input_stream: "VIDEO:input_video"
+  input_stream: "BOXES:start_pos"
+  input_stream: "CANCEL_ID:cancel_object_id"
+  output_stream: "BOXES:boxes"
+}
+
+# Manages newly detected objects and objects that are already being tracked.
+# It associates duplicated detections and updates the locations of detections
+# from the tracking results.
+node: {
+  calculator: "TrackedDetectionManagerCalculator"
+  input_stream: "DETECTIONS:detections_with_id"
+  input_stream: "TRACKING_BOXES:boxes"
+  output_stream: "DETECTIONS:tracked_detections"
+  output_stream: "CANCEL_OBJECT_ID:cancel_object_id"
+
+  input_stream_handler {
+    input_stream_handler: "SyncSetInputStreamHandler"
+    options {
+      [mediapipe.SyncSetInputStreamHandlerOptions.ext] {
+        sync_set {
+          tag_index: "TRACKING_BOXES"
+        }
+        sync_set {
+          tag_index: "DETECTIONS"
+        }
+      }
+    }
+  }
+}
diff --git a/mediapipe/graphs/tracking/subgraphs/object_tracking_gpu.pbtxt b/mediapipe/graphs/tracking/subgraphs/object_tracking_gpu.pbtxt
new file mode 100644
index 0000000..ab27dbd
--- /dev/null
+++ b/mediapipe/graphs/tracking/subgraphs/object_tracking_gpu.pbtxt
@@ -0,0 +1,56 @@
+# MediaPipe object tracking subgraph.
+
+type: "ObjectTrackingSubgraphGpu"
+
+input_stream: "VIDEO:input_video"
+input_stream: "DETECTIONS:new_detections"
+output_stream: "DETECTIONS:tracked_detections"
+
+# Assigns a unique id to each new detection.
+node {
+  calculator: "DetectionUniqueIdCalculator"
+  input_stream: "DETECTIONS:new_detections"
+  output_stream: "DETECTIONS:detections_with_id"
+}
+
+# Converts detections to TimedBox protos which are used as initial location
+# for tracking.
+node {
+  calculator: "DetectionsToTimedBoxListCalculator"
+  input_stream: "DETECTIONS:detections_with_id"
+  output_stream: "BOXES:start_pos"
+}
+
+# Subgraph that tracks boxes (see box_tracking_gpu.pbtxt).
+node {
+  calculator: "BoxTrackingSubgraphGpu"
+  input_stream: "VIDEO:input_video"
+  input_stream: "BOXES:start_pos"
+  input_stream: "CANCEL_ID:cancel_object_id"
+  output_stream: "BOXES:boxes"
+}
+
+# Manages newly detected objects and objects that are already being tracked.
+# It associates duplicated detections and updates the locations of detections
+# from the tracking results.
+node: {
+  calculator: "TrackedDetectionManagerCalculator"
+  input_stream: "DETECTIONS:detections_with_id"
+  input_stream: "TRACKING_BOXES:boxes"
+  output_stream: "DETECTIONS:tracked_detections"
+  output_stream: "CANCEL_OBJECT_ID:cancel_object_id"
+
+  input_stream_handler {
+    input_stream_handler: "SyncSetInputStreamHandler"
+    options {
+      [mediapipe.SyncSetInputStreamHandlerOptions.ext] {
+        sync_set {
+          tag_index: "TRACKING_BOXES"
+        }
+        sync_set {
+          tag_index: "DETECTIONS"
+        }
+      }
+    }
+  }
+}
diff --git a/mediapipe/graphs/tracking/subgraphs/renderer_cpu.pbtxt b/mediapipe/graphs/tracking/subgraphs/renderer_cpu.pbtxt
new file mode 100644
index 0000000..665126a
--- /dev/null
+++ b/mediapipe/graphs/tracking/subgraphs/renderer_cpu.pbtxt
@@ -0,0 +1,29 @@
+# MediaPipe object tracking rendering subgraph.
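+#
+# Since render_detection_id is enabled in the options below, the overlay also
+# draws each box's track id, which makes it easy to tell tracked boxes apart
+# from raw per-frame detections.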
+ +type: "RendererSubgraphCpu" + +input_stream: "IMAGE:input_image" +input_stream: "DETECTIONS:detections" +output_stream: "IMAGE:output_image" + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:detections" + output_stream: "RENDER_DATA:detections_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 255 g: 0 b: 0 } + render_detection_id: true + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_image" + input_stream: "detections_render_data" + output_stream: "IMAGE:output_image" +} diff --git a/mediapipe/graphs/tracking/subgraphs/renderer_gpu.pbtxt b/mediapipe/graphs/tracking/subgraphs/renderer_gpu.pbtxt new file mode 100644 index 0000000..e94fb6d --- /dev/null +++ b/mediapipe/graphs/tracking/subgraphs/renderer_gpu.pbtxt @@ -0,0 +1,29 @@ +# MediaPipe object tracking rendering subgraph. + +type: "RendererSubgraphGpu" + +input_stream: "IMAGE:input_image" +input_stream: "DETECTIONS:detections" +output_stream: "IMAGE:output_image" + +# Converts the detections to drawing primitives for annotation overlay. +node { + calculator: "DetectionsToRenderDataCalculator" + input_stream: "DETECTIONS:detections" + output_stream: "RENDER_DATA:detections_render_data" + node_options: { + [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] { + thickness: 4.0 + color { r: 255 g: 0 b: 0 } + render_detection_id: true + } + } +} + +# Draws annotations and overlays them on top of the input images. +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE_GPU:input_image" + input_stream: "detections_render_data" + output_stream: "IMAGE_GPU:output_image" +} diff --git a/mediapipe/graphs/youtube8m/BUILD b/mediapipe/graphs/youtube8m/BUILD new file mode 100644 index 0000000..7318a8c --- /dev/null +++ b/mediapipe/graphs/youtube8m/BUILD @@ -0,0 +1,73 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "yt8m_feature_extraction_calculators", + deps = [ + "//mediapipe/calculators/audio:audio_decoder_calculator", + "//mediapipe/calculators/audio:basic_time_series_calculators", + "//mediapipe/calculators/audio:mfcc_mel_calculators", + "//mediapipe/calculators/audio:rational_factor_resample_calculator", + "//mediapipe/calculators/audio:spectrogram_calculator", + "//mediapipe/calculators/audio:stabilized_log_calculator", + "//mediapipe/calculators/audio:time_series_framer_calculator", + "//mediapipe/calculators/core:add_header_calculator", + "//mediapipe/calculators/core:matrix_multiply_calculator", + "//mediapipe/calculators/core:matrix_subtract_calculator", + "//mediapipe/calculators/core:matrix_to_vector_calculator", + "//mediapipe/calculators/core:packet_cloner_calculator", + "//mediapipe/calculators/core:packet_resampler_calculator", + "//mediapipe/calculators/tensorflow:image_frame_to_tensor_calculator", + "//mediapipe/calculators/tensorflow:matrix_to_tensor_calculator", + "//mediapipe/calculators/tensorflow:pack_media_sequence_calculator", + "//mediapipe/calculators/tensorflow:string_to_sequence_example_calculator", + "//mediapipe/calculators/tensorflow:tensor_squeeze_dimensions_calculator", + "//mediapipe/calculators/tensorflow:tensor_to_matrix_calculator", + "//mediapipe/calculators/tensorflow:tensorflow_inference_calculator", + "//mediapipe/calculators/tensorflow:tensorflow_session_from_frozen_graph_calculator", + "//mediapipe/calculators/tensorflow:unpack_media_sequence_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + ], +) + +cc_library( + name = "yt8m_inference_calculators_deps", + deps = [ + "//mediapipe/calculators/core:concatenate_vector_calculator", + "//mediapipe/calculators/core:dequantize_byte_array_calculator", + "//mediapipe/calculators/core:packet_cloner_calculator", + "//mediapipe/calculators/core:side_packet_to_stream_calculator", + "//mediapipe/calculators/core:string_to_int_calculator", + "//mediapipe/calculators/tensorflow:lapped_tensor_buffer_calculator", + "//mediapipe/calculators/tensorflow:string_to_sequence_example_calculator", + "//mediapipe/calculators/tensorflow:tensor_to_vector_float_calculator", + "//mediapipe/calculators/tensorflow:tensorflow_inference_calculator", + "//mediapipe/calculators/tensorflow:tensorflow_session_from_saved_model_calculator", + "//mediapipe/calculators/tensorflow:tfrecord_reader_calculator", + "//mediapipe/calculators/tensorflow:unpack_media_sequence_calculator", + "//mediapipe/calculators/tensorflow:unpack_yt8m_sequence_example_calculator", + "//mediapipe/calculators/tensorflow:vector_float_to_tensor_calculator", + "//mediapipe/calculators/tensorflow:vector_int_to_tensor_calculator", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:labels_to_render_data_calculator", + "//mediapipe/calculators/util:local_file_contents_calculator", + "//mediapipe/calculators/util:top_k_scores_calculator", + "//mediapipe/calculators/video:opencv_video_decoder_calculator", + "//mediapipe/calculators/video:opencv_video_encoder_calculator", + ], +) diff --git a/mediapipe/graphs/youtube8m/feature_extraction.pbtxt b/mediapipe/graphs/youtube8m/feature_extraction.pbtxt new file mode 100644 index 0000000..89d1053 --- /dev/null +++ b/mediapipe/graphs/youtube8m/feature_extraction.pbtxt @@ -0,0 +1,295 @@ +input_side_packet: "input_sequence_example" +input_side_packet: 
"inception3_pca_mean_matrix" +input_side_packet: "inception3_pca_projection_matrix" +input_side_packet: "vggish_pca_mean_matrix" +input_side_packet: "vggish_pca_projection_matrix" +output_side_packet: "sequence_example_to_serialize" + +node { + calculator: "StringToSequenceExampleCalculator" + input_side_packet: "STRING:input_sequence_example" + output_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example" +} + +node { + calculator: "UnpackMediaSequenceCalculator" + input_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example" + output_side_packet: "DATA_PATH:input_file" + output_side_packet: "RESAMPLER_OPTIONS:packet_resampler_options" + output_side_packet: "AUDIO_DECODER_OPTIONS:audio_decoder_options" + node_options: { + [type.googleapis.com/mediapipe.UnpackMediaSequenceCalculatorOptions]: { + base_packet_resampler_options { + frame_rate: 1.0 + base_timestamp: 0 + } + base_audio_decoder_options { + audio_stream { stream_index: 0 } + } + } + } +} + +# Decode the entire video. +node { + calculator: "OpenCvVideoDecoderCalculator" + input_side_packet: "INPUT_FILE_PATH:input_file" + output_stream: "VIDEO:decoded_frames" +} + +# Extract the subset of frames we want to keep. +node { + calculator: "PacketResamplerCalculator" + input_stream: "decoded_frames" + output_stream: "sampled_decoded_frames" + input_side_packet: "OPTIONS:packet_resampler_options" +} + +node { + calculator: "ImageFrameToTensorCalculator" + input_stream: "sampled_decoded_frames" + output_stream: "tensor_frame" +} + +node { + calculator: "TensorFlowSessionFromFrozenGraphCalculator" + output_side_packet: "SESSION:session" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowSessionFromFrozenGraphCalculatorOptions]: { + graph_proto_path: "/tmp/mediapipe/classify_image_graph_def.pb" + tag_to_tensor_names { + key: "IMG_UINT8" + value: "DecodeJpeg:0" + } + tag_to_tensor_names { + key: "INCEPTION_POOL3" + value: "pool_3/_reshape:0" + } + } + } +} + +node { + calculator: "TensorFlowInferenceCalculator" + input_side_packet: "SESSION:session" + input_stream: "IMG_UINT8:tensor_frame" + output_stream: "INCEPTION_POOL3:inception3_hidden_activation_single_element_batch" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowInferenceCalculatorOptions]: { + signature_name: "" + batch_size: 1 + add_batch_dim_to_tensors: false + } + } +} + +# Remove the batch dimension. 
+node: { + calculator: "TensorSqueezeDimensionsCalculator" + input_stream: "inception3_hidden_activation_single_element_batch" + output_stream: "inception3_hidden_activation" + node_options: { + [type.googleapis.com/mediapipe.TensorSqueezeDimensionsCalculatorOptions]: { + dim: 0 + } + } +} + +node { + calculator: "TensorToMatrixCalculator" + input_stream: "TENSOR:inception3_hidden_activation" + output_stream: "MATRIX:inception3_hidden_activation_matrix" +} + +node { + calculator: "MatrixSubtractCalculator" + input_stream: "MINUEND:inception3_hidden_activation_matrix" + input_side_packet: "SUBTRAHEND:inception3_pca_mean_matrix" + output_stream: "mean_subtracted_inception3_matrix" +} +node { + calculator: "MatrixMultiplyCalculator" + input_stream: "mean_subtracted_inception3_matrix" + input_side_packet: "inception3_pca_projection_matrix" + output_stream: "pca_inception3_matrix" +} +node { + calculator: "MatrixToVectorCalculator" + input_stream: "pca_inception3_matrix" + output_stream: "pca_inception3_vf" +} + +######################## END OF VISUAL ########################### + +######################## BEGIN OF AUDIO ########################## +node { + calculator: "AudioDecoderCalculator" + input_side_packet: "INPUT_FILE_PATH:input_file" + input_side_packet: "OPTIONS:audio_decoder_options" + output_stream: "AUDIO:audio" + output_stream: "AUDIO_HEADER:audio_header" +} + +node { + calculator: "AddHeaderCalculator" + input_stream: "DATA:audio" + input_stream: "HEADER:audio_header" + output_stream: "media_audio" +} + +# Always convert the audio to mono. +node { + calculator: "AverageTimeSeriesAcrossChannelsCalculator" + input_stream: "media_audio" + output_stream: "mono_waveform" +} + +node { + calculator: "RationalFactorResampleCalculator" + input_stream: "mono_waveform" + output_stream: "resampled_waveform" + node_options: { + [type.googleapis.com/mediapipe.RationalFactorResampleCalculatorOptions] { + target_sample_rate: 16000.0 + } + } +} +node { + calculator: "SpectrogramCalculator" + input_stream: "resampled_waveform" + output_stream: "spectrogram_squared_magnitude" + node_options: { + [type.googleapis.com/mediapipe.SpectrogramCalculatorOptions] { + frame_duration_seconds: 0.025 + frame_overlap_seconds: 0.015 + output_type: SQUARED_MAGNITUDE + } + } +} +node { + calculator: "MelSpectrumCalculator" + # MelSpectrumCalculator expects SQUARED_MAGNITUDE input, but its output is in + # linear magnitude units. + input_stream: "spectrogram_squared_magnitude" + output_stream: "mel_spectrum_magnitude" + node_options: { + [type.googleapis.com/mediapipe.MelSpectrumCalculatorOptions] { + # Follow the 'wideband' or '16kHz' speech convention. 
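+      # These values (64 mel bands over 125-7500 Hz, computed on the 25 ms /
+      # 10 ms spectrogram frames above and framed into 0.96 s examples below)
+      # mirror the published VGGish frontend, so the embeddings match what the
+      # YouTube-8M models were trained on.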
+ channel_count: 64 + min_frequency_hertz: 125.0 + max_frequency_hertz: 7500.0 + } + } +} +node { + calculator: "StabilizedLogCalculator" + input_stream: "mel_spectrum_magnitude" + output_stream: "log_mel_spectrum_magnitude" + node_options: { + [type.googleapis.com/mediapipe.StabilizedLogCalculatorOptions] { + stabilizer: 0.01 + } + } +} +node { + calculator: "TimeSeriesFramerCalculator" + input_stream: "log_mel_spectrum_magnitude" + output_stream: "log_mel_spectrum_magnitude_with_context" + node_options: { + [type.googleapis.com/mediapipe.TimeSeriesFramerCalculatorOptions] { + frame_duration_seconds: 0.96 + frame_overlap_seconds: -0.04 + } + } +} +node { + calculator: "MatrixToTensorCalculator" + input_stream: "log_mel_spectrum_magnitude_with_context" + output_stream: "log_mel_spectrum_magnitude_tensor" + node_options: { + [type.googleapis.com/mediapipe.MatrixToTensorCalculatorOptions] { + transpose: true + } + } +} + +node { + calculator: "TensorFlowSessionFromFrozenGraphCalculator" + output_side_packet: "SESSION:vggish_session" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowSessionFromFrozenGraphCalculatorOptions]: { + graph_proto_path: "/tmp/mediapipe/vggish_new.pb" + tag_to_tensor_names { + key: "INPUT" + value: "vggish/input_features:0" + } + tag_to_tensor_names { + key: "VGGISH" + value: "vggish/fc2/BiasAdd:0" + } + } + } +} + +node { + calculator: "TensorFlowInferenceCalculator" + input_side_packet: "SESSION:vggish_session" + input_stream: "INPUT:log_mel_spectrum_magnitude_tensor" + output_stream: "VGGISH:vggish_tensor" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowInferenceCalculatorOptions]: { + signature_name: "" + batch_size: 128 + } + } +} + +node { + calculator: "TensorToMatrixCalculator" + input_stream: "REFERENCE:log_mel_spectrum_magnitude_with_context" + input_stream: "TENSOR:vggish_tensor" + output_stream: "MATRIX:vggish_matrix" + node_options: { + [type.googleapis.com/mediapipe.TensorToMatrixCalculatorOptions] { + time_series_header_overrides { + num_channels: 128 + num_samples: 1 + } + } + } +} + +node { + calculator: "MatrixSubtractCalculator" + input_stream: "MINUEND:vggish_matrix" + input_side_packet: "SUBTRAHEND:vggish_pca_mean_matrix" + output_stream: "mean_subtracted_vggish_matrix" +} +node { + calculator: "MatrixMultiplyCalculator" + input_stream: "mean_subtracted_vggish_matrix" + input_side_packet: "vggish_pca_projection_matrix" + output_stream: "pca_vggish_matrix" +} +node { + calculator: "MatrixToVectorCalculator" + input_stream: "pca_vggish_matrix" + output_stream: "pca_vggish_vf" +} + +# Store the features in the SequenceExample. +node { + calculator: "PackMediaSequenceCalculator" + input_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example" + output_side_packet: "SEQUENCE_EXAMPLE:sequence_example_to_serialize" + input_stream: "FLOAT_FEATURE_RGB:pca_inception3_vf" + input_stream: "FLOAT_FEATURE_AUDIO:pca_vggish_vf" +} + +# Serialize the SequenceExample to a string for storage. 
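+# This closes the round trip: the graph consumed a serialized SequenceExample
+# as a side packet and now re-serializes the same example with the
+# FLOAT_FEATURE_RGB and FLOAT_FEATURE_AUDIO feature lists attached.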
+node { + calculator: "StringToSequenceExampleCalculator" + input_side_packet: "SEQUENCE_EXAMPLE:sequence_example_to_serialize" + output_side_packet: "STRING:output_sequence_example" +} + diff --git a/mediapipe/graphs/youtube8m/label_map.txt b/mediapipe/graphs/youtube8m/label_map.txt new file mode 100644 index 0000000..a3ed470 --- /dev/null +++ b/mediapipe/graphs/youtube8m/label_map.txt @@ -0,0 +1,3862 @@ +Game +Video game +Vehicle +Concert +Musician +Cartoon +Performance art +Car +Dance +Guitar +String instrument +Food +Association football +Musical ensemble +Music video +Animal +Animation +Motorsport +Pet +Racing +Recipe +Mobile phone +Cooking +Smartphone +Gadget +Trailer (promotion) +Toy +Minecraft +Drum kit +Cuisine +Motorcycle +Piano +Dish (food) +Drum +Acoustic guitar +Action-adventure game +Call of Duty +Electric guitar +Drummer +Cosmetics +Keyboard instrument +Choir +Strategy video game +Fishing +Aircraft +Train +Airplane +Pianist +Sports car +Art +Hair +Rail transport +Basketball +Cycling +Orchestra +Motorcycling +Transport +Musical keyboard +Bicycle +Fish +Outdoor recreation +Disc jockey +Machine +Sports game +Radio-controlled model +Hairstyle +Fashion +Dog +Skateboarding +Fighting game +Basketball moves +Wedding +Skateboard +IPhone +Personal computer +Truck +Boat +Railroad car +Snare drum +American football +Drawing +Pokémon +Winter sport +Tractor +Naruto +Grand Theft Auto V +Cymbal +Horse +House +Festival +Engine +Highlight film +Boxing +World of Warcraft +Call of Duty: Black Ops II +Four-wheel drive +Bird +Violin +Skateboarding trick +Christmas +Weight training +Recreational fishing +Warcraft +Ice skating +Driving +Video game console +Microsoft Windows +Airline +Pokémon (video game series) +Landing +Combat +League of Legends +Vegetable +Model aircraft +Airliner +Samsung Galaxy +Sport utility vehicle +Electronic keyboard +Hockey +Radio-controlled aircraft +??? +Eye shadow +Cooking show +Dessert +Battlefield (series) +Slam dunk +Plant +Painting +Drifting (motorsport) +Rallying +Lego +Tablet computer +Call of Duty: Modern Warfare 2 +Comedy (drama) +Grand Theft Auto: San Andreas +Off-road vehicle +The Walt Disney Company +Locomotive +Takeoff +RuneScape +Puppy +Amusement park +Call of Duty: Modern Warfare 3 +Motocross +Dragon Ball +Airport +Photography +Call of Duty: Black Ops +Shoe +Radio-controlled car +Sonic the Hedgehog +Skatepark +Bride +First-person shooter +Accordion +Jet aircraft +Mascara +Halo (series) +Camera +Final Fantasy +Skiing +Gym +Aviation +Mountain bike +Marching band +??? +Extreme sport +FIFA 15 +Brass instrument +Sasuke Uchiha +Cat +Sedan (automobile) +Pickup truck +Meat +BMW +Parade +Cake +Supercar +Aquarium +Weather +Weapon +Nail (anatomy) +Surfing +PlayStation 3 +Room +Call of Duty 4: Modern Warfare +Helicopter +Laptop +Saxophone +Star Wars +Goku +Hotel +Xbox 360 +Arcade game +Doll +News presenter +Exhaust system +Volkswagen +Hatchback +Action figure +Computer +Carnival +Lipstick +Wii +Sonic the Hedgehog (character) +School +Ballet +Eye liner +Heavy equipment +IPad +Running +Baking +Rapid transit +Coupé +Road bicycle +Card game +Nail polish +Playing card +Bus +Counter-Strike (video game) +Gardening +Outline of meals +Nail art +Tank +??? +Bollywood +Tennis +Ship +BMX bike +Drink +Grand Theft Auto IV +Snowboarding +Mountain biking +Rouge (cosmetics) +Super Smash Bros. +??? +Street Fighter +Stadium +Underwater +Hunting +Kickflip +Metin2 +The Sims +Viola +Pony +PlayStation 4 +Television +??? +Beach +Manicure +Chocolate +Wood +Snow +Sneakers +??? 
+Roller coaster +Afro-textured hair +Timbales +Need for Speed +Robot +Paper +Gymnastics +Farm +Diatonic button accordion +Fighter aircraft +Sketch (drawing) +Mercedes-Benz +Chevrolet +Batman +Loudspeaker +Tool +Nike, Inc. +Race track +Ski +Underwater diving +Computer hardware +Garden +Paint +Cello +Digital camera +Scooter (motorcycle) +Motorboat +Harry Potter +??? +GoPro +Assassin's Creed +Fishing rod +Battlefield 3 +IPod +Nature +Dota 2 +Tree +My Little Pony +Dress +Xbox One +Train station +Firefighter +Jeep +Rail transport modelling +Resort +Flute +Touhou Project +Fruit +Chicken as food +Knife +Dashcam +Clash of Clans +Kitchen +Slide show +The Legend of Zelda +Fireworks +Swimming pool +Rugby football +Building +Kitten +Television advertisement +??? +Battlefield 4 +Horse racing +MapleStory +Subwoofer +Flour +IPod Touch +World of Tanks +Music festival +Comedian +Figurine +Kingdom Hearts +Manga +Wrestling +Trumpet +Xbox +Model (person) +Jumping +Dough +FIFA 13 +Pro Evolution Soccer +Resident Evil +Eye +Guitar Hero +Enduro +Home appliance +News program +Watch +Audi +Off-road racing +Ice dancing +Construction +Organ (music) +PlayStation Portable +Figure skating +Fiddle +WWE 2K +Climbing +Spider-Man +Braid +Muscle +The Elder Scrolls V: Skyrim +Nintendo 3DS +Fire +Human swimming +BMW Motorrad +One Piece +Wildlife +Apartment +Dressage +Scuba diving +Call of Duty: Ghosts +Eating +Kickboxing +Egg as food +Origami +The Elder Scrolls +Ford Mustang +Fishing lure +Light +Running back +Air force +M.U.G.E.N +Transformers +Living room +Soldier +Bag +Ballroom dance +Gohan +Kayak +Sheet music +Destiny (video game) +Wall +Church (building) +Sewing +Chipmunk +Surfboard +Concealer +Drag racing +Mega Man +Walt Disney World +Chicken +Parachuting +Classic car +Furniture +Jewellery +Recreational vehicle +Call of Duty: Advanced Warfare +Street Fighter IV +Sakura Haruno +Restaurant +Halo 3 +Wheelie +Mario Kart +Headphones +Factory +Yu-Gi-Oh! Trading Card Game +Speedometer +Circus +Muscle car +Bedroom +Tekken +Graffiti +River +Lighting +Guitar amplifier +Knitting +Call of Duty: Zombies +PlayStation +Radio-controlled helicopter +Cookware and bakeware +Trail +Camping +University +Indian cuisine +Multiplayer online battle arena +Ball +Nightclub +Book +Lego minifigure +PlayStation 2 +Dodge +Garry's Mod +Camera lens +Hockey puck +Barbie +Thomas the Tank Engine +Go-kart +Vegetarian cuisine +Monster High +Yacht +Collectible card game +Auto Race (Japanese sport) +Role-playing game +Madden NFL +Unidentified flying object +Longboard (skateboard) +Toddler +Digital single-lens reflex camera +Xbox (console) +Rail freight transport +Honda Civic +Convertible +The Sims 2 +Lamborghini +Printer (computing) +Cream +Parrot +Tire +Quadcopter +Littlest Pet Shop +Wii U +Planet +??? 
+The Sims 3 +Sony Xperia +Salad +Sailboat +Cruise ship +Unmanned aerial vehicle +Naruto: Ultimate Ninja +Barbecue +Mortal Kombat +Slot machine +Longboarding +Halo: Reach +Paragliding +Bread +Monster Hunter +Stitch (textile arts) +Dofus +StarCraft II: Wings of Liberty +Game controller +Gears of War +Mud bogging +Snowboard +Synthesia +Wig +Road bicycle racing +Wheel +Macintosh +Home improvement +Printing +Insect +Road +Parachute +Cattle +Hair coloring +IPhone 4S +Advertising +Potato +Runway +Van +Zoo +Handheld game console +Water +Rock Band +Volkswagen Golf +Bathroom +Stunt performer +Bleach (manga) +Metal Gear +Santa Claus +Hiking +Samsung Electronics +Runway (fashion) +Elevator +Cricket +Gran Turismo (series) +Fire engine +Kinder Surprise +Play-Doh +Grilling +Eyelash +Table tennis +Fiat Automobiles +Dragon +Lion +Nintendo Entertainment System +PlayStation (console) +Stallion +Ice skate +Baseball park +Flamenco +Steam engine +Plough +Farming Simulator +Soup +Snowmobile +Mare +Counter-Strike: Source +Sail +Squat (exercise) +Bass (fish) +Banjo +Harmonica +Quartet +Drum stick +IPhone 5 +Reptile +Prayer +T-shirt +Talent show +Rice +Roasting +Diablo III +CrossFire (video game) +Renault +Pizza +Trombone +Chevrolet Camaro +Barbell +Ryu (Street Fighter) +Clay +Beyblade +Lake +Sauce +??? +Cube +Forza (series) +Cookie +Taiko no Tatsujin +Mixtape +Medicine +Door +Monster +Call of Duty: World at War +Mud +Computer keyboard +Clarinet +Defense of the Ancients +Sora (Kingdom Hearts) +Computer monitor +Super Street Fighter IV +PlayStation Vita +Guild Wars +Album +Model car +Tenor saxophone +The Twilight Saga (film series) +Rubik's Cube +Sailor Moon +Teacher +Mixing console +Card manipulation +Combine harvester +Boeing 737 +Bull +Fish as food +Cheese +Concrete +Board game +Moped +Puzzle +Lego Star Wars +Poker +Portrait +Luigi +Dining room +Pokémon X and Y +Floor +Asus +Inuyasha +Livestock +Lawn mower +Tibia (video game) +Tabletop game +Iron Man +Tomato +Juice +Final Fantasy VII +Lip gloss +Super Smash Bros. Melee +Central processing unit +Sitcom +Cockpit +Emergency vehicle +FIFA 12 +Bodyboarding +Earth +The Lego Group +Ice cream +Microphone +Rallycross +Website +Table (furniture) +Ice +Magic: The Gathering +Ninja +Darth Vader +Saw +Mickey Mouse +Handbag +The King of Fighters +Ballet dancer +Samsung Galaxy Note series +Washing machine +Zee TV +Point Blank (2008 video game) +Gibson Les Paul +Dune buggy +DayZ (video game) +Television set +Dirt track racing +Edward Cullen +Beauty salon +Hetalia: Axis Powers +Vampire +Gliding +Batman: Arkham +Mountain +Rain +Shark +Waterfall +DarkOrbit +Bagpipes +Comics +Rock climbing +Skin +Arena +IPhone 4 +ARMA (series) +Super Smash Bros. for Nintendo 3DS and Wii U +Curry +Pasta +Halo 4 +Superman +Icing (food) +Google Nexus +Marathon +Deer +Guitar Hero III: Legends of Rock +Balloon +Goalkeeper (association football) +Red Bull +Nissan GT-R +Noodle +Fishing bait +Pencil +Plants vs. 
Zombies +Athlete +Computer case +Stretching +Terrier +Outer space +Textile +Mercedes-AMG +Hard disk drive +Biceps +Handball +Land Rover +Kamen Rider Series +Parakeet +Bear +Rim (wheel) +Chevrolet Corvette +Battery (electricity) +Milk +Roblox +BMW M3 +Christmas decoration +Moon +Microsoft Lumia +Combat Arms (video game) +Maize +Cargo +Headset (audio) +Bee +Helmet +Street art +Clown +Tattoo +Cupcake +Traxxas +Money +Hatsune Miku: Project DIVA +Bead +Angry Birds +Movieclips +Optimus Prime +MacBook +Mass Effect +Bowser (character) +Sega Genesis +Pachinko +Jedi +Jeep Wrangler +Dragon Ball Z: Budokai Tenkaichi +Tales (series) +Loader (equipment) +Water park +Beef +Sewing machine +Beer +Glass +Silage +Seafood +Gran Turismo 5 +Harp +Joker (comics) +Volkswagen Beetle +??? +BlackBerry +AdventureQuest Worlds +Bowling +Guild Wars 2 +Dragon Quest +Washing +Mermaid +Cue stick +Boot +Stir frying +Grand Theft Auto: Vice City +Penguin +Acrylic paint +Cocktail +Kingdom Hearts II +Coral +Borderlands 2 +Telephone +Gears of War (video game) +Far Cry +Tractor pulling +Rock Band (video game) +Crane (machine) +Updo +Stuffed toy +Lawn +Tekken (video game) +Airbus A320 family +IPhone 5S +Watercolor painting +Ten-pin bowling +Duck +Pokémon Trading Card Game +Oven +Subaru Impreza +Porsche 911 +Backpack +Carl Johnson (Grand Theft Auto) +German Shepherd +Turtle +Metal +Left 4 Dead +Ultralight aviation +Comic book +Batting (cricket) +Tram +Mower +Reef aquarium +??? +Swing (dance) +Lego City +Game Boy Advance +Diesel engine +Pitcher +Dance studio +Hamburger +Cake decorating +Left 4 Dead 2 +Bible +Candy +Vacuum cleaner +Pokémon Omega Ruby and Alpha Sapphire +Sowing +Roof +Donkey Kong +Trout +Coin +Tent +Digimon +Costume +Warface +Sandwich +BMW 3 Series +Star Wars: The Old Republic +Trampoline +Pipe organ +Latin dance +Aerobics +Aion: Upheaval +Supermoto +Netbook +Gift +Strum +Mitsubishi Lancer Evolution +Drum and bugle corps (modern) +Gramophone record +Gundam (mobile suit) +Euro Truck Simulator 2 +Tai chi +Teenage Mutant Ninja Turtles +Aerobatics +Wedding dress +Hair conditioner +Achievement (video gaming) +Boeing 777 +Shadow the Hedgehog +Boeing 747 +Simba +Silkroad Online +Kindergarten +Smartwatch +Computer mouse +Bell +Museum +Rabbit +Total War (series) +DVD +Devil May Cry +Face +Lathe +Five Nights at Freddy's +Logging +String quartet +Bridge +Super Mario Bros. +Fishing reel +Badminton +Clock +Stove +Wine +Subaru +Leather +IPad 2 +Terraria +Attack on Titan +Bottle +Kick +Police officer +Raw foodism +Video card +Alpine skiing +String (music) +StarCraft (video game) +Roadster (automobile) +Steak +Hearthstone (video game) +Solo dance +Foreign exchange market +God of War (series) +Hulk (comics) +Easter egg +Ceiling +Yo-kai Watch +Wakeboarding +Monster truck +McDonald's +Assassin's Creed III +Chopper (motorcycle) +Largemouth bass +Roller skating +Glider (aircraft) +Jacket +Marimba +Christmas tree +Sand +Afro +MacBook Pro +Booster pack +Dark Souls II +Bartender +Quarterback +Illustration +ARMA 2 +Star Trek +Itachi Uchiha +Hot rod +Saints Row +Freeza +Need for Speed: Most Wanted (2012 video game) +Hair twists +Super Mario World +Crash Bandicoot +Pork +Shampoo +Mask +Hair iron +Marvel vs. 
Capcom +Castlevania +Halo 2 +Battery charger +Tower defense +BBC +Kawasaki motorcycles +Link (The Legend of Zelda) +Muffler +Nintendo 64 +Marriage proposal +Fingerboard (skateboard) +Beehive +Pokémon HeartGold and SoulSilver +Bowling ball +Tower of Saviors +Artificial nails +Final Fantasy XIII +Chair +Hijab +Juggling +Nissan Skyline +Anpanman +Car wash +Kite +Diablo (video game) +Resident Evil 4 +Candy Crush Saga +Rocket +Video game arcade cabinet +Whale +Glider (sailplane) +Flooring +Kingdom Hearts (video game) +??? +Fast food +Mandolin +Metal detector +Cinema 4D +Ash Ketchum +Router (computing) +Yamaha YZF-R1 +Uncharted +DC Comics +Egg +Lexus +Ollie (skateboarding) +Hamster +Chainsaw +Galaxy +Embroidery +Suite (hotel) +Brush +Electronic drum +Gran Turismo 6 +NBA 2K15 +Dolphin +Salmon +Window +Drill +Pen +Backpacking (wilderness) +Torte +Web page +Dreadlocks +Hot Wheels +Brake +Tuba +Volcano +Ibiza +Dragon Age +Mini +Perfect World (video game) +Knot +Tails (character) +Thunderstorm +Video camera +Smoothie +Crossover (automobile) +Condominium +Desert +Pump +Strawberry +Coffeemaker +The Legend of Zelda: Ocarina of Time +Tarot +Architecture +Portal (video game) +Dynasty Warriors +Lightning McQueen +Pirates of the Caribbean (film series) +Tile +Battlefield: Bad Company 2 +Sketch comedy +Aikido +V8 engine +Sailor Moon (character) +Lamborghini Aventador +Carp fishing +Kirby (series) +Banana +Police car +Laser lighting display +Necklace +??? +WWE '13 +Mini (marque) +Tanki Online +Oil +Radio-controlled boat +Dinosaur +Pie +President of the United States +NBA 2K14 +Labrador Retriever +Blender +Plarail +Captain America +Electric locomotive +Street racing +Need for Speed: Most Wanted (2005 video game) +Canoe +Golf club +Sheep +Bar +CDJ +Lace +Gold +Glove +Halo: Combat Evolved +Alphabet +Fender Telecaster +IPhone 3GS +Beadwork +Personal water craft +Dietary supplement +James Bond +Ragnarok Online +French braid +Road racing +Star +Dean Winchester +Snake +Seed +Christmas lights +Plaster +Trunks (Dragon Ball) +Forage harvester +Cartoon Network +Honda CBR series +Battlefield Hardline +Tekken 6 +Glitter +Ford Focus +Roland V-Drums +Ski-Doo +Tyrannosaurus +New Super Mario Bros. +Cue sports +Rainbow Loom +Samsung Galaxy S III +Glasses +Italian cuisine +RollerCoaster Tycoon 3 +Pig +Lock (security device) +The Lord of the Rings (film series) +Military parade +Elephant +Pull-up (exercise) +Eyelash extensions +Ring (jewellery) +Minivan +Coca-Cola +Mural +Love song +Portal 2 +Mortal Kombat (2011 video game) +Yarn +Pokémon Ruby and Sapphire +Dragon Nest +Japanese cuisine +Resident Evil 5 +Jeans +Map +Pikachu +Sun +Pond +Bulldog +Greenhouse +Škoda Auto +Baby transport +Apple +The Doctor (Doctor Who) +Turbine +Naruto: Ultimate Ninja Storm +Watch Dogs +VHS +Ariel (Disney) +Sculpture +Bulldozer +Transformice +Sushi +Home run +Fountain +Slopestyle +Fullmetal Alchemist +Ultimate Marvel vs. Capcom 3 +Automotive lighting +Lightsaber +Chevrolet Silverado +Honey +Wangan Midnight +Sword +Toilet +Super Mario Galaxy +Akuma (Street Fighter) +Shiva +Bed +Toy train +Manufacturing +Ram Trucks +Stuffing +Biscuit +Kia Motors +Spa +Samsung Galaxy S II +Demolition +Airbus A330 +Breakfast +Airbus A380 +Pancake +Kawasaki Ninja +Mitsubishi Lancer +Mushroom +Grand Theft Auto: The Lost and Damned +Microsoft Flight Simulator +Spacecraft +Logo +Stock car racing +Goat +Pool (cue sports) +Assassin's Creed (video game) +Majin Boo +Vespa +??? 
+Samsung Galaxy S4 +Assassin's Creed IV: Black Flag +Batman: Arkham City +Monkey +Death Note +WWE 2K15 +Pumpkin +Shopping mall +Rose +Cola +Minnie Mouse +Caporales +Jet Ski +World of Warcraft: Wrath of the Lich King +Winter +Prom +Karaoke box +Minibike +RFactor +Art exhibition +Plush +Chocolate cake +Ford F-Series +Soap +Knuckles the Echidna +Dump truck +Giant panda +Dance Dance Revolution +Princess +Street food +Flashlight +Animal Crossing +Pilates +Pipe band +Toyota Land Cruiser +Lara Croft +Jumbotron +Ferrari F430 +Cell (Dragon Ball) +BMW 3 Series (E36) +Injustice: Gods Among Us +Dumbbell +Samsung Galaxy Tab series +Bodyweight exercise +Penalty kick (association football) +Lizard +City +Bionicle +Kirby (character) +WWE 2K14 +Pokémon Battle Revolution +Sonic the Hedgehog (1991 video game) +Alliance of Valiant Arms +Racket (sports equipment) +K-1 +Acer Inc. +Recorder (musical instrument) +Earring +National park +The Elder Scrolls IV: Oblivion +Audi R8 +Clothes dryer +Military band +Silver +Warcraft III: Reign of Chaos +Classroom +Samsung Galaxy S5 +Black cat +Scarf +Kratos (God of War) +Skylanders +Super Robot Wars +Electric car +Video lesson +Smoking (cooking) +Antenna (radio) +Sonic Generations +Butter +Chess +Hello Kitty +Goldfish +Carrot +Blu-ray +Squirrel +Balloon (aeronautics) +Microwave oven +Range Rover +Wool +TalesRunner +IPad Mini +Pokémon Emerald +Inflatable boat +Bull riding +Football boot +Gears of War 2 +Bugatti Veyron +Airbrush +Brick +Avengers (comics) +Plants vs. Zombies 2: It's About Time +United States Navy +Ball (association football) +Volkswagen Gol +Yo-yo +Forza Motorsport 4 +Logitech +Shirt +Golden Retriever +Alarm device +Water slide +Paramotor +Fondant icing +Acrobatic gymnastics +Coach (sport) +The Witcher 3: Wild Hunt +Tabla +Kinect +Zee Bangla +??? 
+Cabinetry +Quilt +Claw crane +Spyro (series) +Yoshi +Tekken Tag Tournament 2 +Diamond +Samsung Galaxy S series +BMW 3 Series (E46) +Tiger +Number +Traffic +Metalworking +Haruhi Suzumiya +Gown +Luxury yacht +Yuna (Final Fantasy) +Station wagon +Softball +The Legend of Zelda: Twilight Princess HD +Dungeon Fighter Online +Plasticine +LG Optimus series +Source (game engine) +Battlefield 2 +BMW 3 Series (E30) +Ink +Half-Life 2 +Hitman (series) +Inline skates +Remote control +Mercedes-Benz C-Class +The Sims 4 +Harlem Shake (meme) +Magic Kingdom +Dune +Prince of Persia +Final Fantasy XIV +Marvel Universe +Draco Malfoy +Ram Pickup +DC Universe Online +Assassin's Creed II +Mars +Xylophone +Dragon Age: Inquisition +Game Boy +Carpet +Roxas (Kingdom Hearts) +Balance beam +Mass Effect 2 +Dragon Ball Xenoverse +Call of Duty: Black Ops – Zombies +Cadillac +Guinea pig +The Hobbit (film series) +Need for Speed: World +Pastry +Chapel +Rayman +Armour +Mouse +Assassin's Creed: Brotherhood +Lord Voldemort +Magnet +The Sims (video game) +Rubber band +Grocery store +Reborn doll +Ford GT +WWE '12 +PlanetSide 2 +Jaguar Cars +Volvo Cars +Jeep Cherokee (SJ) +Homer Simpson +USB flash drive +Torero +Persona (series) +Model railroad layout +Buttercream +Serve (tennis) +Ferrari 458 +Honda Accord +Chevrolet Impala +Command & Conquer +Warframe +Chrysler (brand) +Standup paddleboarding +Pretty Cure +Campsite +Final Fantasy VIII +Audi A4 +Sailing ship +Rafting +Custom car +Belle (Disney) +Rowing (sport) +Jeep Grand Cherokee +Wire +BMW M5 +Hula hoop +Pinball +Spaghetti +Monster Hunter Freedom Unite +Far Cry 4 +Pro Evolution Soccer 2015 +Test Drive (series) +Motorcycle helmet +Router (woodworking) +Cave +Cheesecake +Birthday cake +Suzuki Jimny +New Super Mario Bros. Wii +Ezio Auditore da Firenze +Fisherman +Mime artist +Roller skates +Pump It Up (video game series) +Dissidia Final Fantasy +Supercharger +Gemstone +Titanfall +Downhill +Medal +Garbage truck +Forehand +Heroes of Newerth +Plastic +??? +Astronaut +Guitar Hero World Tour +ArcheAge +Lowrider +Police dog +Toyota Corolla +Ford Fiesta +Helmet camera +Cabal Online +Assassin's Creed Unity +Ceramic +Kidō Senshi Gundam: Senjō no Kizuna +Hot air balloon +Shower +Donald Duck +Multi Theft Auto +Rock Band 3 +Porsche 911 GT3 +Stick figure +Sled +Lemon +Frog +Mexican Creole hairless pig +Forklift +Dog agility +Kettlebell +Shelby Mustang +Candle +Bowling (cricket) +Kick (football) +Electric vehicle +Oboe +Desktop computer +Wing Chun +Statue +DayZ (mod) +Eagle +Fire station +Nike Air Max +Rage (video game) +Woodturning +Fireplace +Volkswagen Jetta +Madison Square Garden +Fly tying +Spore (2008 video game) +Hammond organ +Sam Winchester +The Pink Panther +Saints Row: The Third +Cherry blossom +Doraemon +WWE action figures +Marvel vs. 
Capcom 3: Fate of Two Worlds +Bugatti Automobiles +Fire Emblem +Border Collie +Aircraft carrier +Snow blower +Culinary art +Ken Masters +Seafight +Sport bike +Dentist +Easter egg (media) +Joystick +Tuna +Crysis 2 +Audi Quattro +Academy Awards +Ponytail +Ramen +Hummer +Fishing tackle +Final Fantasy X-2 +Coupon +Porsche Carrera +Wood carving +Rocksmith +Wallet +Refrigerator +Koi +Battlefield Heroes +Phonograph +Onion +Biceps curl +Trainz +Hat +Jubeat +Nissan Skyline GT-R +Mattel +GameCube +LittleBigPlanet 2 +Epiphone +Inazuma Eleven +Soft tennis +Killer whale +Hair straightening +Merienda +The Witcher (video game) +Skate (video game) +Live for Speed +Rooster +Chihuahua (dog) +Triangle +Land Rover Defender +Marvel Legends +Trousers +SD Gundam Capsule Fighter +Ratchet & Clank +Doughnut +Hatsune Miku: Project DIVA F +Bouzouki +Domestic canary +Half-Life (video game) +Raven (comics) +Black Butler +Mario Kart 8 +Chili pepper +BMW 5 Series +Hail +Ouran High School Host Club +Brain +Chinese cuisine +Playmobil +Model building +Ribbon +Pit bike +Sonic Unleashed +Solar panel +Orange (fruit) +Otis Elevator Company +Mu Online +Hang gliding +Path of Exile +Animal Crossing: New Leaf +Steel guitar +Sword Art Online +Lego Ninjago +Paddle +Second Life +Aikatsu! +IPhone 5C +Gothic (series) +Batman: Arkham Asylum +Carburetor +Crab +Espresso machine +The Phantom of the Opera (1986 musical) +Hellsing +Spider +Super Mario Galaxy 2 +Duel Masters Trading Card Game +Drywall +Laundry +United States Air Force +Assassin's Creed: Revelations +Corel +Omelette +Composer +Ford Escort (Europe) +Grape +Honda CB600F +Tea +Elmo +Temple +Need for Speed: Carbon +Catamaran +Perfect World (company) +Skate 3 +Missile +Infomercial +Chevrolet Chevelle +Airport terminal +Crysis (video game) +StepMania +Red Dead Redemption +Atari +Couch +The Idolmaster +Beatmania IIDX +Big wave surfing +Tokyo Mew Mew +Wheat +Warhammer Fantasy Battle +Rock (geology) +Snowplow +Submarine +Doctor Eggman +Wood flooring +Bangs (hair) +Yamaha YZF-R6 +Pontiac Firebird +Red Dead +Field hockey +Vineyard +Waterfowl hunting +Domestic pigeon +Toyota Hilux +CNET +Preacher +Sonic Adventure +Lamborghini Murciélago +Marinera +Screen printing +Crazyracing Kartrider +The Legend of Zelda: Majora's Mask +Sunglasses +Log cabin +Fungus +Wedding photography +Flag +Devil May Cry 4 +Cappuccino +Flamenco guitar +Projector +Rock dove +The Elder Scrolls Online +LittleBigPlanet (2008 video game) +Digital video recorder +Djembe +Vending machine +Mehndi +Telescope +Flyff +Pattern (sewing) +Stairs +Nissan 350Z +Cell (biology) +Need for Speed: Underground 2 +Incandescent light bulb +Gallon +Greeting card +Balloon modelling +Sensor +Realm of the Mad God +Nest +Writing +Logic Pro +Opel Astra +Campervan +Cooked rice +Muffin +Wind power +Hedgehog +Soft drink +Calculator +Harness racing +Buick +Beast (Disney) +Destroyer +Point guard +Forza Horizon +Mercedes-Benz SLS AMG +Supermarket +Catfish +Final Fantasy XI +The Last of Us +Battleship +Dodge Challenger +Peter Pan +Metal Gear Solid 4: Guns of the Patriots +Toyota 86 +Bakery +Compact disc +Backhoe +Saddle +Total Drama Island +Erhu +Bumblebee (Transformers) +Cajón +Beatmania +Ice rink +Child safety seat +Honda S2000 +Samsung Galaxy Note II +Higurashi When They Cry +Union Pacific Railroad +BMW 3 Series (E90) +V6 engine +BlazBlue +Rottweiler +Necktie +Image scanner +White-tailed deer +TV4 (Sweden) +Bishop +Need for Speed: Hot Pursuit (2010 video game) +Princess Peach +Rust (video game) +Doom (1993 video game) +Fender Custom Shop +Smite (video 
game) +Nissan Silvia +??? +Pudding +Sephiroth (Final Fantasy) +Irish dance +MacBook Air +Commodore 64 +IMac +Space Shuttle +Automobile repair shop +Collie +Dragon Age: Origins +Sangokushi Taisen +Calligraphy +Black belt (martial arts) +??? +Valve +Crisis Core: Final Fantasy VII +Two-stroke engine +Killzone (series) +Full moon +Hunter × Hunter +New York City Subway +Latte +Mercedes-Benz S-Class +Tetris +Samurai +Predator (alien) +Arabian horse +Mercedes-Benz E-Class +Spinach +Dōjinshi +Polar bear +Body piercing +Amazon Kindle +Biology +Key (lock) +Mobile Suit Gundam: Extreme Vs. +Rappelz +Bobber (motorcycle) +Toy balloon +Mexican cuisine +Rope +Taco +Taxicab +Infestation: Survivor Stories +Clutch +PlayStation Network +Garage (residential) +Milkshake +Cloud Strife +Honda Integra +Eintopf +Primary school +Kingdom Hearts Birth by Sleep +Resident Evil (1996 video game) +Foal +GameSpot +Castle +Human hair color +Scorpion (Mortal Kombat) +Poultry +Poodle +Vans +Forza Horizon 2 +Zero (Mega Man) +Toyota Camry +Chemical reaction +Test Drive Unlimited 2 +Bacon +Mario Party +18 Wheels of Steel +Goose +Sausage +Compost +Cucumber +French horn +Analog synthesizer +Siamese fighting fish +??? +Las Vegas Strip +Crysis 3 +School bus +Oculus Rift +Carnival Cruise Line +Honda CBR600RR +Pokémon Red and Blue +Autobot +Christ (title) +Cockatiel +Ace Combat +Mazda MX-5 +Countertop +Safari +Final Fantasy XIV: A Realm Reborn +Track (rail transport) +Ganon +Two-wheel tractor +??? +Watermelon +Paper plane +Rainbow trout +??? +Tony Hawk's (series) +Korean cuisine +Lip balm +Angry Birds (video game) +Lead guitar +Pug +Monster Hunter Tri +Playground +God of War III +Herd +Niko Bellic +Bungee jumping +Soil +Subway Surfers +Hindu temple +Audi A6 +Hogwarts +Eggplant +Mabinogi (video game) +Sugar +Makeup brush +Rocksmith 2014 +Ocean +Asphalt (series) +Dental braces +Bob cut +Nissan 240SX +Cement +Sharpening +Leopard +United States Army +Tom and Jerry +Xbox 360 controller +Dragon Ball: Raging Blast 2 +Winnie the Pooh (franchise) +Trophy +Inazuma Eleven (manga) +Owl +Street Fighter II: The World Warrior +Golf ball +Floyd Mayweather Jr. vs. Manny Pacquiao +Belt (clothing) +Slender: The Eight Pages +Test Drive Unlimited +Super Mario Bros. 3 +Power supply +Retail +Venom (comics) +IPad (3rd generation) +Teddy bear +Denim +Baseball bat +Halo 3: ODST +Train Simulator (Dovetail Games) +Bowhunting +Lotus Cars +Pineapple +Boeing 737 Next Generation +Audi A3 +Dreamcast +City-building game +Diablo II +Suzuki Hayabusa +Gamepad +Electrical wiring +Kitchen stove +Yamaha Aerox +Monster Hunter Portable 3rd +BMX racing +Katara (Avatar: The Last Airbender) +HP Pavilion (computer) +Emirates (airline) +Amiga +Touchscreen +Winter storm +Driver (video game series) +Pac-Man +Fantage +Land Rover Discovery +Flash (photography) +Human back +Intermodal container +Infiniti +Guilty Gear +Animal shelter +Butterfly +Piccolo (Dragon Ball) +Bicycle frame +Boeing 787 Dreamliner +Toontown Online +Renault Mégane +Age of Empires +Canyon +Ski jumping +Lumber +Carousel +Phantasy Star Online 2 +Dodge Viper +Madden NFL 13 +A-18 Hornet +String trimmer +Mattress +Mixer (cooking) +Sub-Zero (Mortal Kombat) +Ford Ranger (North America) +ESPN +ABS-CBN News and Current Affairs +Synchronised swimming +G-Shock +??? +Angel +Champion +Horse show +??? 
+Rurouni Kenshin +Halo 5: Guardians +Coconut +Deep frying +Dollhouse +Campus +Volkswagen Golf Mk6 +Curtain +Mountain pass +Dojo +Boiler +PRS Guitars +Diesel locomotive +Monster Hunter 4 +French Bulldog +Prince (Prince of Persia) +Fixed-gear bicycle +Ninja Gaiden +Samsung Galaxy Note 3 +Opel Corsa +Jack Sparrow +Boeing 767 +Lexus IS +Tales of Symphonia +Autumn +Inline skating +Filter (aquarium) +Naruto Shippuden: Ultimate Ninja Storm Generations +Garmon +Flower bouquet +SimCity +Gravy +Bully (video game) +French fries +Kawasaki Ninja 250R +Rock fishing +Batman: Arkham Origins +Ceiling fan +Audi TT +Space Marines (Warhammer 40,000) +Acer Aspire +D.Gray-man +Duct tape +Electromagnetic coil +Heroes of the Storm +Tom Clancy's Ghost Recon +Sponge cake +Steelpan +Modem +The King of Fighters 2002 +Dying Light +Need for Speed: Shift +Riot Games +Rainbow +Bean +Chevrolet Opala +Reborn! +Floral design +Megatron +Kawasaki Ninja ZX-6R +Agriculture +Cottage +Television presenter +Metal Gear Solid V: The Phantom Pain +Juicing +BioShock +Plymouth (automobile) +Crêpe +Fist of the North Star +The Legend of Zelda: The Wind Waker +X-Men +Piston +Deck (building) +Nativity scene +Sega Saturn +Stardoll +Just Dance (video game) +Chun-Li +BMW R1200GS +LG G3 +Fisheye lens +Dragon Ball: Raging Blast +Big Boss (Metal Gear) +Dam +Gel +JBL +Dachshund +Bane (comics) +E-reader +The Lord of the Rings Online +Ferb Fletcher +Yeast +Monastery +Vampire Knight +Vodka +IPhone 3G +Tricycle +Metal Slug (series) +Steel +LED lamp +Geometry Dash +Dominoes +Gibson Les Paul Custom +Street Fighter III: 3rd Strike +Hay +Honda CR-X +Spray painting +Flip Video +Bald eagle +God of War II +Clay animation +Tomato sauce +Clone trooper +Beagle +Popcorn +Rubber stamp +Clannad (visual novel) +Fried rice +Moto G (1st generation) +Toyota Prius +Mega Man Battle Network +Doom II: Hell on Earth +Grand Theft Auto: Vice City Stories +Deadpool +Phantasy Star +Lock picking +Sugar paste +Chevrolet Caprice +??? +Herb +The Legend of Zelda: Skyward Sword +Domesticated turkey +Final Fantasy VI +BMW S1000RR +Mitsubishi Pajero +Mazda3 +IKEA +Chevrolet S-10 +Paper Mario +India TV +Tow truck +Orochimaru (Naruto) +Ape +Line (geometry) +Kawasaki Ninja ZX-10R +Aerosol spray +Power supply unit (computer) +Zucchini +Doberman Pinscher +Wolfenstein (series) +Contortion +Fertilizer +Cooler Master +Highway +Chocolate brownie +Street Fighter III +Tsubasa: Reservoir Chronicle +Parking +Olaf (Disney) +Frets on Fire +Multi-function printer +Suzuki GSX-R1000 +Lush (company) +Hang (instrument) +Nexus 7 (2012) +Skyscraper +Gorilla +Ōendan +Puff pastry +Crossbow +Forza Motorsport 5 +Uncharted 2: Among Thieves +Pokémon Mystery Dungeon +Closet +??? 
+Daytona International Speedway +VTEC +Cheerleading +Slot car +Garden railway +Albert Wesker +Naruto Shippuden: Ultimate Ninja Storm 2 +Sewing needle +Trials (series) +Sheriff Woody +K +Straw +Mitsubishi Eclipse +Frisbee +TrackMania +Manure +Chocolate chip +Cart +Borderlands: The Pre-Sequel +Diving +Wood-burning stove +Medal game +Chrono Trigger +Sherlock Holmes +Library +Volkswagen Golf Mk2 +Guzheng +Malinois dog +Goofy +Pedal steel guitar +Virtua Fighter 5 +Lego Marvel Super Heroes +Kantai Collection +Electric violin +Firewood +Devil May Cry 3: Dante's Awakening +Digital painting +Flair bartending +Boxer (dog) +Melon +Low-carbohydrate diet +Škoda Octavia +The Crew (video game) +Unicycle +GAZ +Gummy bear +Marker pen +Need for Speed: The Run +Dead Space (2008 video game) +Duke Nukem +Dirt 3 +Movie theater +Final Fantasy XIII-2 +Comet +WWE SmackDown vs. Raw 2010 +Gran Turismo 4 +Star Wars: Battlefront II +Lamb and mutton +Ant +Loki (comics) +Percy the Small Engine +Villain +Plumbing +Avocado +BioShock Infinite +Dormitory +Mango +Lucky Star (manga) +Shadow the Hedgehog (video game) +Cabbage +Peanut butter +Didgeridoo +Hard Rock Cafe +Donkey Kong Country +Amazon.com +Star Wars Battlefront (2015 video game) +Harpsichord +Aston Martin Vantage (2005) +Suzuki Swift +Crocodile +Jet engine +Sonic the Hedgehog 2 +Delta Air Lines +Harry Potter and the Deathly Hallows +Trunk (car) +Zangief +Brave Frontier +Chuck E. Cheese's +Iori Yagami +Robotics +Kebab +Cheeseburger +Hatsune Miku: Project DIVA F 2nd +Humbucker +Camcorder +Mega Man X (video game) +Landscape +Shih Tzu +Volkswagen Golf Mk4 +Pollution +Guppy +Coffeehouse +Killer Instinct +Crusher +Allods Online +??? +Boeing 757 +Eclipse +Meatball +Saints Row 2 +Roulette +Grand Theft Auto: Liberty City Stories +Walleye +Walmart +Bearing (mechanical) +Forest +Forever 21 +Canvas +Rat rod +Soulcalibur V +Sonic the Hedgehog (2006 video game) +Multirotor +??? +LG G2 +Moisturizer +Halo: The Master Chief Collection +SEAT León +Skylanders: Swap Force +Pan flute +Chevrolet Tahoe +Metal Gear Online +Fiat 126 +Mount & Blade: Warband +Kennel +Vibraphone +Satellite +Yamaha Raptor 700R +Sonic & Knuckles +Honda Fit +Caridea +Armored Core +Bull Terrier +Firefighting +Catwoman +Octopus +Fencing +Sitar +Limousine +Nintendo DSi +HTC One (M8) +McDonnell Douglas F-15 Eagle +Rat +GoldenEye 007 (1997 video game) +Gasoline +Ken (doll) +Quadracycle +Dead or Alive (series) +Microsoft Surface +Scooby-Doo +Landscape painting +Toyota Land Cruiser Prado +Hair removal +Sink +Mount & Blade +BMW 5 Series (E39) +Mewtwo +Mambo (music) +The Witcher 2: Assassins of Kings +North American P-51 Mustang +Alien (creature in Alien franchise) +Cloud +Forge +Christian Church +Tom Clancy's Rainbow Six +Mirror +Chevrolet Big-Block engine +Chevrolet Corvette (C6) +Abarth +Mazda RX-8 +Pendant +Metal Gear Solid 3: Snake Eater +Buffet +Haunted house +Cockatoo +Royal Air Force +The Embodiment of Scarlet Devil +LG G series +Fishing vessel +DualShock +Sonic Heroes +Drawer (furniture) +BMW 1 Series +Werewolf +DatPiff +Koi pond +Toyota Celica +Twelve-string guitar +Potato chip +Stargate +Killer Instinct (2013 video game) +Caramel +Sprite (computer graphics) +NHL 14 +Ham +Sky +Sweater +Chocolate chip cookie +stay night +Text (literary theory) +Skate 2 +Engraving +Final Fantasy XV +Cornrows +Light Yagami +Floristry +Sly Cooper +Volkswagen Golf Mk5 +Snowman +??? 
+Vox (musical equipment) +Happy Farm +Orc +Suit (clothing) +PC game +Ace Online +Saints Row IV +Slingshot +Dead Island +Ratchet (Ratchet & Clank) +Gears of War: Judgment +Dragon Quest X +Furby +Crayon Shin-chan +Soprano saxophone +Tifa Lockhart +European perch +Patio +Fried chicken +Sawmill +Mirror's Edge +Canon PowerShot +Guitar Hero: Warriors of Rock +Rome: Total War +Hummer H2 +Radar +Final Fantasy IV +Table saw +Barista +BMW 7 Series +Camel +Windows Media Video +Felt +Audi S4 +Cowboy +Molding (process) +Contact lens +Fiat Punto +The Hobbit +Indoor cycling +Sunset +??? +Persian cat +Hitman: Absolution +Battlefield: Bad Company +Eren Yeager +Sinterklaas +Crash Bandicoot (video game) +Midnight Club: Los Angeles +Metal Gear Rising: Revengeance +Hand-to-hand combat +Avon Products +Log splitter +Stormtrooper (Star Wars) +Epic Rap Battles of History +Shed +Walking +Belt (mechanical) +Hot dog +Sock +Chicken coop +Humpback whale +Character (arts) +Peugeot 106 +Toast +Princess Jasmine +Exercise ball +Fox +Green Lantern +Looney Tunes +Wedding ring +Tap (valve) +Charizard +Mii +Rolls-Royce Limited +Copic +Mega Man Zero (video game) +Jak and Daxter +Priston Tale +Glacier +IPod Nano +Banknote +Mario & Sonic at the Olympic Games +Hero Factory +Bamboo +Fillet (cut) +Stencil +Winch +Dogfight +Treadmill +Bassoon +Staffordshire Bull Terrier +Cardboard +Epiphone Les Paul +Compact Cassette +Gelatin +White House +Suitcase +MX vs. ATV +Clank (Ratchet & Clank) +Beach volleyball +Loadout +Batter (cooking) +Zack Fair +Cliff +Baggage +Cream cheese +Lantern +Naruto: Clash of Ninja +Treasure +Raccoon +Mini 4WD +Robotic vacuum cleaner +Gate +Ribs (food) +Oatmeal +Water filter +Super Mario Sunshine +Animal Crossing: City Folk +Driver's license +Asus ZenFone +American black bear +Little Red Riding Hood +??? +Stable +Gashapon +Need for Speed: Underground +Dishwasher +Frying pan +Schutzhund +Mario Kart 7 +Disney Infinity +Saab Automobile +F-Zero +Halloween costume +Thor (Marvel Comics) +Foam +Tokyo Ghoul +Chevrolet Monte Carlo +Flush toilet +Axe +Worms (series) +Marble +Driver's education +Madden NFL 12 +Pressure washing +Christmas ornament +Buffalo wing +Duct (flow) +Indiana Jones +Chart +Yoshi's Island +Subaru Forester +Scar (The Lion King) +Mousse +Lalaloopsy +Micropterus +Gibson SG +Express train +Citroën C4 +Submission wrestling +Broccoli +Donkey Kong Country 2: Diddy's Kong Quest +Barrel organ +Mega Man 2 +Dragon boat +New Super Mario Bros. U +Gecko +Pillow +Kemenche +Porsche Cayenne +??? +Shift 2: Unleashed +Bomberman +Dungeons & Dragons +BeamNG.drive +AdventureQuest +Mario Kart 64 +Disc brake +Bloons Tower Defense +Forza Motorsport 3 +Guitar Center +Super Smash Bros. 
(video game) +Fiat Uno +Printed circuit board +Porcelain +E-book +Macaroni +Lego Friends +Max Payne 3 +StarCraft II: Heart of the Swarm +Medal of Honor: Warfighter +Kamaz +Air France +Porsche Carrera GT +Black Rock Shooter +Rosary +Halo Wars +Car dealership +Toys "R" Us +Total War: Rome II +Need for Speed: ProStreet +Mansion +Cheetah +Marshmallow +Shorts +Unturned +Charango +Lithium polymer battery +Sea turtle +Vatican City +Starbucks +Emergency vehicle lighting +Volkswagen Golf Mk1 +Lupin the Third +Pearl +Wii Sports +Hero +Chrysler 300 +GMC (automobile) +Charm bracelet +Kamen Rider Battle: Ganbaride +Ys (series) +Asus Eee Pad Transformer +BMW 5 Series (E60) +Ford Mustang SVT Cobra +Autocross +Royal icing +Laboratory +Peugeot 206 +Maltese (dog) +Soulcalibur IV +Wardrobe +Garlic +Tugboat +Luke Skywalker +Electronic circuit +Coat (clothing) +Passenger +??? +Cactus +Ford Crown Victoria +Elfen Lied +Circular saw +Radha +Welsh Corgi +Eiffel Tower +Softail +Bajo sexto +Lobster +Colt (horse) +Solar eclipse +Greyhound +Pepsi +Black Widow (Natasha Romanova) +Virtua Fighter +Filly +Canning +Fat +Goth subculture +Slow cooker +Lightning (Final Fantasy) +Water polo +Apple pie +Inkjet printing +Mercedes-Benz SLK-Class +Bandsaw +Cammy +Fight Night (EA video game series) +Tortoise +Multicooker +Ferret +Dipping sauce +Circle +Rocket launch +Pembroke Welsh Corgi +Cold porcelain +Battlefield Play4Free +ThinkPad +BMW X6 +??? +Sony Xperia Z +Selfie +Mahjong +Cherry +IPod Touch (5th generation) +Colin McRae: Dirt 2 +Tekken 5 +Shawl +Ultron +Guitar pick +Elk +Sunrise +Amusement arcade +Hammock +Decoupage +Mug +Sander +Autogyro +Woodchipper +Texas Instruments +Baby Alive +Tarantula +Shrub +Donkey Kong (video game) +Coating +Steirische Harmonika +Racing wheel +Raphael (Teenage Mutant Ninja Turtles) +Bank +Opel Vectra +Skull +Sand art and play +Birth +Lasagne +Infinity Ward +Philippine cuisine +Custard +Lettuce +Megami Tensei +Flappy Bird +Sleeping Dogs (video game) +Fender Jazz Bass +Devil Kings +Blouse +Notebook +Aloe vera +Funko +Lelouch Lamperouge +Macramé +Casserole +Capacitor +I Wanna Be the Guy +Hose +Subaru Legacy +Star Citizen +Sabian +Ventriloquism +Call of Duty (video game) +Kindle Fire +Starfire (Koriand'r) +Zeus +Microscope +Basket +Coyote +Bart Simpson +Volvo FH +Spinnerbait +Honda CR-V +Sony Xperia Z1 +Satan +Mercedes-Benz Sprinter +Team roping +Jeep Cherokee (XJ) +Friendship bracelet +Leonardo (Teenage Mutant Ninja Turtles) +Single track (mountain biking) +Chickpea +Vegetable carving +??? +Spark plug +Akita (dog) +Canoeing +Recumbent bicycle +Boom Beach +Puppetry +Sport stacking +Kendama +Punching bag +Staples Center +Marvel vs. 
Capcom 2: New Age of Heroes +Apple TV +Davul +Scratchcard +Disgaea +Larva +Used car +DmC: Devil May Cry +Kyo Kusanagi +Mega Man (video game) +K'Nex +Burger King +Dungeon crawl +Pro Evolution Soccer 2009 +Blueberry +Village +Convenience store +Golf cart +BMW M6 +Fiber +Resistance (series) +Picture frame +Trouble in Terrorist Town +Volkswagen Type 2 +Domestic pig +Grand Tourer Injection +Alucard (Hellsing) +Aerith Gainsborough +Batmobile +Gummi candy +Cauliflower +Marlin +Gold medal +Shin Megami Tensei: Persona 3 +Table football +Shikamaru Nara +Truggy +Ford Explorer +Chevrolet Cruze +American Airlines +Jupiter +Galaxy Nexus +KFC +Spec Ops: The Line +Rigs of Rods +EA Sports UFC +Plastic bottle +Hubble Space Telescope +Barn +Hand +Star Wars: Battlefront (2004 video game) +Digimon Masters +Gibson ES-335 +Waffle +Paper model +Ressha Sentai ToQger +Gas tungsten arc welding +Pavement (architecture) +Sonic & Sega All-Stars Racing +??? +Palace +Stealth game +God of War (2005 video game) +Mazda6 +Dragon Age II +Warhammer Online: Age of Reckoning +Switch +Grizzly bear +??? +H.A.V.E. Online +Lowlands (festival) +Wok +Window blind +Nokia N8 +Android Wear +V10 engine +Toyota Tundra +Marble (toy) +Alligator +Screencast +Range Rover Sport +Moose +Polo +Laminate flooring +BVE Trainsim +Baby sling +Garage door +Compact car +Dishonored +Parrot AR.Drone +Giraffe +Need for Speed Rivals +McLaren 12C +Pork ribs +Track cycling +Don't Starve +Marvel: Avengers Alliance +Popeye +Ford Mondeo +HTC One (M7) +Pyramid +Asphalt +Beetle +Canon EOS 600D +Oldsmobile Cutlass +Suzuki GSX-R750 +Audi A8 +World of Warcraft: The Burning Crusade +Homing pigeon +NHL 15 +Touring motorcycle +Goblin +Nissan 370Z +Metro: Last Light +Skylanders: Giants +Ran Online +Gear +Mercedes-Benz G-Class +Travian +Burnout Paradise +Tag team +Electric motorcycles and scooters +Kazuya Mishima +Serious Sam +Nexus 7 (2013) +Super Paper Mario +Doodle +Gelatin dessert +Andalusian horse +Warrior +Ferrari 360 +DVD player +WildStar (video game) +Hyundai Genesis +Chutney +Pizzica +Dead Rising 2 +Potter's wheel +Yoda +Cylinder (engine) +M. Bison +Metal Gear Solid: Peace Walker +Masonry +Edward Elric +Split (gymnastics) +Mario Kart DS +Ghost Rider +Grand Theft Auto: Episodes from Liberty City +F1 2012 (video game) +Cookie Monster +Red hair +Nami (One Piece) +Canon EF lens mount +Finger +Asteroid +Nissan Navara +Riddler +Traffic light +Nikon Coolpix series +Dragonica +Broth +Metal Gear Solid 2: Sons of Liberty +Samsung Galaxy Y +Wedding cake +Half-pipe +Gothic II +Vehicle horn +Motor oil +Credit card +Resident Evil 2 +British Airways +Great Dane +Stain +Super Mario 3D World +Yamaha YZ125 +Atari 2600 +Rover (space exploration) +Cayman +Ragdoll +Basement +Betta +Mobile home +Heroes of Might and Magic +Photograph +Wreath +Universe of The Legend of Zelda +Lamborghini Diablo +Albus Dumbledore +BlackBerry Bold +Prototype 2 +Soybean +Hurdling +Spock +Sony Xperia Z2 +Monopoly (game) +Fruit preserves +SimCity (2013 video game) +Cutlet +Volkswagen Touareg +Aerosol paint +Risotto +Toyota 4Runner +Driveclub +Moshing +Total War: Shogun 2 +Elf +Hot tub +President +NHL 13 +Rudolph the Red-Nosed Reindeer +Bugs Bunny +Mario & Luigi: Superstar Saga +Tulip +Paper Mario: The Thousand-Year Door +Hammer +EarthBound +Meta Knight +La Tale +Shadow of the Colossus +GLaDOS +Hunting dog +BioShock 2 +Supercars Championship +Orbit +God of War: Ascension +Bloons +Ney +Toyota MR2 +Cam +??? +Zoom lens +H&M +Hovercraft +Sanshin +Instant noodle +Luigi's Mansion +Tales of Vesperia +Dekotora +??? 
+Talking Tom and Friends +Baseball glove +Ale +Meringue +Canon EOS 7D +Shaolin Kung Fu +Hawk +Donkey Kong Country Returns +The Salvation Army +Brown trout +Sugarcane +Cake pop +Suzuki Bandit series +Green tea +Warehouse +Appalachian dulcimer +Kermit the Frog +Unicorn +Fountain pen +Acer Iconia +Master System +Robocraft +Merlin +Sweet potato +Alice's Adventures in Wonderland +Solar flare +DigiTech +Saturn +Flash (comics) +Reindeer +Justice League +Line Rider +Runes of Magic +Chevrolet Suburban +Michael Myers (Halloween) +Need for Speed: Undercover +Wand +Chevrolet Malibu +Coal +Antena 3 (Spain) +Driver: San Francisco +Font +Stingray +Thermostat +Toph Beifong +Vert ramp +Ridge Racer +Goat Simulator +Lineage (video game) +CNBC +Juri (Street Fighter) +TARDIS +Pigeon racing +Lap steel guitar +Shovel +Mosaic +Monster Retsuden Oreca Battle +Pair skating +Wallpaper +The Simpsons: Tapped Out +The Elder Scrolls III: Morrowind +Padel (sport) +Fender (vehicle) +Furnace +Nissan Altima +Cornet +Škoda Fabia +Lockheed Martin F-35 Lightning II +Electribe +Alesis +Motorola Razr +Halo: Combat Evolved Anniversary +Darksiders +Neo Geo (system) +Snail +Milking +Pluto (Disney) +Peanut +Verona Arena +Chubby Bunny +Jerry Mouse +Corvette Stingray (concept car) +Cigarette +Cube World +??? +Cybertron +Dacia Duster +Pastel +Transformer +Split screen (computer graphics) +Sukhoi Su-27 +Gabrielle (Xena: Warrior Princess) +Opel Kadett +Nokia Lumia 920 +Twin-turbo +Jiraiya (Naruto) +The Legend of Zelda: A Link to the Past +Crappie +Rechargeable battery +??? +Super Mario 3D Land +??? +DragonFable +Aragorn +Crash Bandicoot 2: Cortex Strikes Back +Southwest Airlines +Multi-tool +Passport +Porsche Panamera +Airship +Tuxedo Mask +Tom Clancy's Ghost Recon: Future Soldier +Melty Blood +Beam (structure) +Gas metal arc welding +Audi Q7 +Bell pepper +Chewing gum +Drinking water +Heat pump +Kenshiro +Patrick Drake and Robin Scorpio +Miniature wargaming +Kawasaki Ninja 650R +Captain Falcon +J-Stars Victory VS +Imperishable Night +Citrus +Drift trike +Optical illusion +Command & Conquer: Red Alert 3 +Suzuka Circuit +Mayonnaise +Quake III Arena +Keychain +God Mode +Ford Bronco +Crocodilia +Black and white +Llanero +Monorail +Nova +G.I. Joe +S.T.A.L.K.E.R.: Call of Pripyat +Perfect Cherry Blossom +Wine tasting +Olive +Ultra Series +Beat 'em up +Jellyfish +Lego Legends of Chima +Sauna +Tom Clancy's Splinter Cell: Blacklist +Starscream +Aang +Misty (Pokémon) +IPad Air +Ice pop +Lute +Jigsaw puzzle +Baritone saxophone +BMW Z4 +Mana (series) +Motorized bicycle +Dalmatian (dog) +Bose Corporation +Burton Snowboards +Kingdom Hearts: Chain of Memories +Mass Rapid Transit (Singapore) +Boombox +Napkin +Chimpanzee +Guitar Hero: Metallica +Radar detector +Honda NSX +Empire: Total War +Darts +Light fixture +Super Mario Bros. 
2 +Temple Run +Kristoff (Disney) +Adrenalyn XL +Tatra (company) +Mini-Z +Tin can +Market garden +Mercedes-Benz Actros +Hug +Whipped cream +Wasp +Oni +Princess Daisy +Constellation +HTC One X +Fender Precision Bass +Prawn +Christmas card +Handbell +Coconut milk +Toshiba Satellite +Riven +Referee +Dragon's Dogma +Dalek +Folding bicycle +2 Days +Kimono +Seiko +Hippopotamus +Resident Evil: Revelations +Billboard (magazine) +Padlock +Butterfly stroke +Mashed potato +Yuan Zai (giant panda) +Aurora +Mop +Tubing (recreation) +Clothes iron +Order & Chaos Online +Zebra +Crème caramel +Warhammer 40,000: Dawn of War +Tom Clancy's Splinter Cell: Conviction +Wakfu +Stitch (Lilo & Stitch) +Calf +Cars 2 (video game) +Crayfish +Engagement ring +Infamous Second Son +Jukebox +Biryani +DJ Hero +Super GT +Chameleon +Oyster +Warcraft III: The Frozen Throne +Dynasty Warriors 7 +Postage stamp +Derek Shepherd +Plotter +Amnesia: The Dark Descent +Jinn +Rayman Legends +Tinker Bell +Patchwork +Doom 3 +Wat +Paiste +Mercedes-Benz CLS-Class +Liquid +GameTrailers +Pep squad +Clam +SaGa (series) +Nollie +Company of Heroes +Green Arrow +Naruto Uzumaki +DeWalt +Putter +Family +Transistor +SOCOM (series) +Pea +Social media +Aliens vs. Predator (2010 video game) +HTC HD2 +Ducati Monster +Aggressive inline skating +Maserati GranTurismo +PortAventura World +Lego Batman: The Videogame +Energy drink +Turban +Pokémon Yellow +Alaskan Malamute +Monica's Gang +Suzuki Vitara +Black Desert Online +Zara (retailer) +Just Dance 2015 +Maid Sama! +Disguise +Kidney +Water well +Farmer +Toyota RAV4 +Night +DJMax +Richter-tuned harmonica +Real Racing 3 +Solid Snake +United States dollar +F1 2010 (video game) +Samsung Galaxy Ace +Trials Evolution +Cadillac CTS +Daihatsu +Balcony +Xperia Play +Rookie +Timing belt (camshaft) +Monster Energy +Ork (Warhammer 40,000) +Toyota JZ engine +Drive-through +Spektrum RC +Hyundai Sonata +Chinchilla +Wii Sports Resort +Interchange (road) +Whitewater slalom +Ticket (admission) +Bayonetta +Salsa (sauce) +PlayStation All-Stars Battle Royale +Lego Minecraft +??? +Mule +Starbound +Scissors +Asparagus +Sony NEX-5 +Electrical connector +Rayquaza +Eight-ball +Steel-string acoustic guitar +Strap +Times Square +Bus driver +SEAT Ibiza +Converse (shoe company) +Atlantic bluefin tuna +Mercedes-Benz W124 +??? +Goggles +Kawasaki Z1000 +Shrimp and prawn as food +Garnier +Semi-trailer +Cod +Carpet cleaning +Lost Planet +Sonic the Hedgehog CD +Final Fantasy V +F1 2013 (video game) +Modelling clay +Audi Sportback concept +WWE All Stars +Mitsubishi Outlander +Punch-Out!! +Disney Infinity: Marvel Super Heroes +Mulch +Willy Wonka +Dead Space 3 +Eurofighter Typhoon +H1Z1: Just Survive +Fakie +Super Mario RPG +Dance Central 3 +Puppet +Cursor (user interface) +Prince of Persia: Warrior Within +Ultimate Mortal Kombat 3 +Macross +Upholstery +The Binding of Isaac (video game) +Deathstroke +The King of Fighters '98 +Dragon Ball Z: Battle of Z +Theatre organ +Valve Corporation +Age of Conan +GameStop +Unreal Tournament +Metroid Prime +Annie (musical) +Cinderella (Disney character) +Eric Cartman +The Prince of Tennis +Kia Sportage +Vase +Nightwing +Wing +Gouken +Loft +Ferris wheel +Newspaper +Cash +A Certain Magical Index +Pretty Rhythm +Marionette +Swing (seat) +He-Man +Cook (profession) +Bentley Continental GT +Shaman King +Hakuōki +Essential oil +Balalaika +Baja 1000 +Hummingbird +PSA HDi engine +Nissan Sentra +??? 
+Infamous (video game) +Game Boy Color +343 Industries +Six Flags Magic Mountain +Woozworld +It's a Small World +Star Fox 64 +Xenoblade Chronicles +TurboGrafx-16 +Tesla coil +HTC Evo 4G +Super Metroid +Label +Gothic (video game) +Samsung Galaxy Gear +??? +Viola caipira +Space Engineers +Yamaha MT-09 +Mortal Kombat: Armageddon +Angry Birds Star Wars +Aerography (arts) +Python (genus) +Hyundai Elantra +MG Cars +Tesla Model S +Castlevania: Symphony of the Night +Body armor +Bone +Tekken 5: Dark Resurrection +Kimchi +Wedding invitation +Porsche 930 +Whey protein +Winery +Honda Integra DC5 +Hatter (Alice's Adventures in Wonderland) +Double Dutch (jump rope) +Cort Guitars +One-man band +Dentures +Tupperware +The Lion King (musical) +BlackBerry Z10 +Kingdom Hearts III +Zipper +Leaf +Samsung Galaxy Note 10.1 +Bansuri +BMW 5 Series (F10) +Australian Shepherd +Crash Bandicoot: Warped +Pou (video game) +Tilapia +Peugeot 205 +AC Cobra +Tin whistle +Tooth brushing +Battlefield 1942 +Virginia Tech +Quarry +Amphibious ATV +Dome +Portable stove +Sound system (Jamaican) +Suikoden +Lunar eclipse +Tiramisu +Inazuma Eleven GO (video game) +Nissan 300ZX +Neverwinter (video game) +Axle +Altaïr Ibn-La'Ahad +Radiator +Resident Evil (2002 video game) +Prince of Persia: The Sands of Time +Crop circle +Rhinoceros +??? +Bookcase +Common quail +The Hunger Games +Mercedes-Benz A-Class +Sarah Walker (Chuck) +Cinnamon +Hiru TV +Bread roll +Magician (fantasy) +Lotion +Killzone 3 +Cadillac Escalade +Silhouette +Swan +Lemonade +Trabant +Mojito +Fossil +Macy's +Silk +Puma SE +Nissan Maxima +Battlefield 2142 +Twisted Metal +Olive oil +Wii Remote +Universal Studios Hollywood +Berserk (manga) +Wellington boot +Tomb Raider: Anniversary +Almond +Audi RS 6 +Ladder +Fire Emblem Awakening +Stained glass +Tape recorder +Emerald +Ford Fusion (Americas) +Iguana +Might and Magic +Pluto +Mazda Raceway Laguna Seca +Air Force 1 (shoe) +Pub +Oshun +Honda K engine +Nerd +Renault 5 +F1 2011 (video game) +Windscreen wiper +Lex Luthor +Track racing +Escalator +Charlie Brown +Chauffeur +Soba +Window film +Bowl +Alarm clock +Pokémon Mystery Dungeon: Explorers of Time and Explorers of Darkness +Roomba +Honda Shadow +Lightning Returns: Final Fantasy XIII +LATAM Brasil +Top +American Bulldog +Legoland +Caterpillar +Windows Phone 8 +Automated teller machine +Samsung Galaxy S III Mini +Portrait photography +Office +Para Para +Hockey stick +Singapore Airlines +Volvo S60 +Udon +Chevrolet K5 Blazer +Bath & Body Works +Segway PT +Castlevania: Lords of Shadow +Mario Kart: Double Dash +Mew (Pokémon) +Walkman +Mentos +Jilbāb +Canter and gallop +Cinderella +Skylanders: Trap Team +Lego Duplo +Morgan le Fay +Decal +Handycam +Women's Tennis Association +Yeti +Multi-valve +Pokémon Stadium +Matryoshka doll +Lexus LFA +Keirin +??? +Honda Prelude +Burrito +Midna +Shuriken +New Super Mario Bros. 
2 +Nebula +BlackBerry PlayBook +Typography +Hare +Mohawk hairstyle +Onsen +Jet pack +Wagon +Just Dance 3 +Nissan S30 +Noah's Ark +Ronald McDonald +Bombardier Dash 8 +Raspberry +Hair dryer +The Simpsons: Hit & Run +Still life +Ice climbing +Lada Riva +Port +Compound bow +Resident Evil 3: Nemesis +R2-D2 +Sand animation +ABS-CBN (television network) +Leica Camera +Final Fantasy (video game) +Arkham Asylum +Dynasty Warriors 8 +Text messaging +Nursery (room) +Donkey Kong 64 +Star Wars Jedi Knight: Jedi Academy +Typing +Mapex Drums +Granado Espada +Calendar +UFC Undisputed 3 +Airbag +DMC World DJ Championships +Gingerbread +Rayman Origins +Lamborghini Reventón +Trials Fusion +Mafia (video game) +Paso Fino +??? +Sport kite +Taco Bell +Envelope +Mazdaspeed3 +Transformers: Generation 1 +Empanada +Mega Man 3 +Transformers: Fall of Cybertron +Rosalina (character) +Mosquito +Volkswagen Tiguan +Metal Gear Solid V: Ground Zeroes +Marmalade +Pandeiro +Miss Saigon +Yosemite National Park +Dutch Warmblood +Pre-flight safety demonstration +Citroën Saxo +Mack Trucks +Medley swimming +??? +Spindle (tool) +Greek cuisine +Hyundai Santa Fe +Chili con carne +Poster +Kawasaki Ninja 300 +Baby food +Grand Theft Auto (Game Boy Advance) +Sim racing +Chromebook +Peter Griffin +Stainless steel +Beverage can +Pixie cut +Chevrolet SS (concept car) +Chokehold +Bullion +Super Mario Kart +The Sims FreePlay +Giant Bicycles +Sgt. Frog +Age of Empires II +Abadá +Kingdom Hearts HD 1.5 Remix +Blackjack +Canon EOS 60D +Filling station +Plywood +Pheasant +Wilson Sporting Goods +Comb +Lighthouse +Rock and Roll Hall of Fame +Tōshirō Hitsugaya +Tales of the Abyss +Maze +Resident Evil: Operation Raccoon City +Cimbalom +??? +Monkey Island (series) +Civilization V +Venus +Peugeot 207 +The Amazing Spider-Man (2012 video game) +Chrono Cross +New Balance +Dassault Rafale +Daredevil (Marvel Comics character) +Silent Hill 2 +Beanie (seamed cap) +Nut (fruit) +Jill Valentine +Scion tC +Percy Jackson +Lord of the Dance (musical) +Far Cry (video game) +Star Wars: The Force Unleashed II +Memory card +Motorola Droid +Skylanders: Spyro's Adventure +Yamaha DT125 +Audi Q5 +Jaguar +Jaguar XJ +Animal Crossing: Wild World +Cockroach +Wetsuit +Funny Car +FarmVille +The Sims 3: Pets +Peel (fruit) +Melting +Aurora (Disney character) +Dry ice +Star Ocean +Duke Nukem Forever +Toribash +Yamaha YZ250 +Tekken 3 +Orihime Inoue +Spyro: Year of the Dragon +Eight-string guitar +Sonic Riders +Penny (The Big Bang Theory) +Honda XR series +Neodymium magnet toys +Leatherman +Maximum Destruction +Super Mario 64 DS +Unreal Tournament 3 +Health club +Chrysler Hemi engine +The North Face +CBS News +Pentium +Cannon +London Fashion Week +Military tactics +Smallmouth bass +Leopard gecko +Top (clothing) +Fable III +Panasonic Lumix DMC-GH4 +Sikorsky UH-60 Black Hawk +Blue Dragon +Loudspeaker enclosure +Ōkami +Tribal Wars +Hot chocolate +Beetroot +??? +Nokia N97 +Blue Exorcist +??? +Sonic and the Black Knight +Headscarf +Plasma display +Woody Woodpecker +??? +Beyblade: Shogun Steel +29er (bicycle) +QR code +Dyson (company) +Yanmar +Gladiator +Nissan Pathfinder +Nissan X-Trail +Autofocus +King Dedede +Zoo Tycoon 2 +Wheat tortilla +Team Rocket +Classical ballet +New York City Police Department +Heihachi Mishima +Crochet hook +Pencil case +Gods Eater Burst +??? 
+DS 3 +Periodic table +General Electric +Nissan Juke +Lollipop +Jaguar F-Type +MechWarrior Online +Dodge Neon SRT-4 +Fried egg +Revell +Indoor soccer +Gratin +Punisher +Washburn Guitars +Caster board +Eldar (Warhammer 40,000) +Final Fantasy Type-0 +NBA 2K10 +The Lord of the Rings: The Battle for Middle-earth II +Texas Longhorns +3D television +Scorpion +Warhammer 40,000: Dawn of War II +Burpee (exercise) +The Order: 1886 +Poptropica +Tomb Raider: Legend +Pelmeni +Bánh +PriPara +Legacy of Kain +Bowser Jr. +Yonex +Humanoid robot +Sony Ericsson Xperia X10 +Rain gutter +FIFA Street (2012 video game) +Castle Crashers +Meteoroid +Macaroni and cheese +Sega CD +Mac Mini +Tales of Xillia +Sonic Lost World +Orphanage +Siku Toys +Lego Batman 3: Beyond Gotham +Daenerys Targaryen +Orangutan +Town +Command & Conquer: Generals +Samurai Shodown +ZX Spectrum +Quake Live +Weighing scale +Dead Frontier +Wolfenstein: The New Order +Colin McRae: Dirt +Square dance +Assassin's Creed Rogue +Airboat +Uncharted: Drake's Fortune +Diddy Kong +Yamaha Motif +Theremin +Rilakkuma +Tie-dye +Flip-flops +Cylinder +Gothic 3 +Unreal (1998 video game) +Beyond: Two Souls +Umbrella +Dream Club +Gradius +Nexus One +Nokia N900 +Tamagotchi +Husband +Sleeping bag +Look-alike +Papaya +Mother 3 +The Beatles: Rock Band +Prince of Persia: The Two Thrones +??? +Darth Maul +Knife sharpening +Meteor shower +Flugelhorn +One Piece: Pirate Warriors +Asterix +Talk box +With Your Destiny +Alan Wake +Barcode +Recurve bow +Diaper bag +Ferrari F12berlinetta +Taskbar +Mortar (masonry) +Toner (skin care) +Freddy Krueger +Marriott International +Mass Effect (video game) +Hawkeye (comics) +Killing Floor (video game) +Chibiusa +Screenshot +Pear +Injury +Kia Sorento +Shredder (Teenage Mutant Ninja Turtles) +Lifeguard +Kei car +Fight Night Champion +Terra (comics) +Gamblerz diff --git a/mediapipe/graphs/youtube8m/local_video_model_inference.pbtxt b/mediapipe/graphs/youtube8m/local_video_model_inference.pbtxt new file mode 100644 index 0000000..12ed2cb --- /dev/null +++ b/mediapipe/graphs/youtube8m/local_video_model_inference.pbtxt @@ -0,0 +1,178 @@ +input_side_packet: "input_sequence_example_path" +input_side_packet: "input_video_path" +input_side_packet: "output_video_path" +input_side_packet: "segment_size" +input_side_packet: "overlap" + +node { + calculator: "LocalFileContentsCalculator" + input_side_packet: "FILE_PATH:input_sequence_example_path" + output_side_packet: "CONTENTS:input_sequence_example" +} + +node { + calculator: "StringToSequenceExampleCalculator" + input_side_packet: "STRING:input_sequence_example" + output_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example" +} + +node { + calculator: "UnpackMediaSequenceCalculator" + input_side_packet: "SEQUENCE_EXAMPLE:parsed_sequence_example" + output_stream: "FLOAT_FEATURE_RGB:rgb_feature_vector" + output_stream: "FLOAT_FEATURE_AUDIO:audio_feature_vector" +} + +node { + calculator: "ConcatenateFloatVectorCalculator" + input_stream: "rgb_feature_vector" + input_stream: "audio_feature_vector" + output_stream: "feature_vector" +} + +node { + calculator: "VectorFloatToTensorCalculator" + input_stream: "feature_vector" + output_stream: "feature_tensor" +} + +node { + calculator: "StringToInt32Calculator" + input_side_packet: "segment_size" + output_side_packet: "segment_size_int" +} + +node { + calculator: "StringToInt32Calculator" + input_side_packet: "overlap" + output_side_packet: "overlap_int" +} + +node { + calculator: "LappedTensorBufferCalculator" + input_stream: "feature_tensor" + 
output_stream: "lapped_feature_tensor" + input_side_packet: "BUFFER_SIZE:segment_size_int" + input_side_packet: "OVERLAP:overlap_int" + node_options: { + [type.googleapis.com/mediapipe.LappedTensorBufferCalculatorOptions] { + add_batch_dim_to_tensors: true + } + } +} + +node { + calculator: "SidePacketToStreamCalculator" + input_side_packet: "segment_size_int" + output_stream: "AT_ZERO:segment_size_int_stream" +} + +node { + calculator: "VectorIntToTensorCalculator" + input_stream: "SINGLE_INT:segment_size_int_stream" + output_stream: "TENSOR_OUT:segment_size_tensor" +} + +node { + calculator: "PacketClonerCalculator" + input_stream: "segment_size_tensor" + input_stream: "lapped_feature_tensor" + output_stream: "synced_segment_size_tensor" +} + +node { + calculator: "TensorFlowSessionFromSavedModelCalculator" + output_side_packet: "SESSION:session" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowSessionFromSavedModelCalculatorOptions]: { + saved_model_path: "/tmp/mediapipe/saved_model" + } + } +} + +node: { + calculator: "TensorFlowInferenceCalculator" + input_side_packet: "SESSION:session" + input_stream: "NUM_FRAMES:synced_segment_size_tensor" + input_stream: "RGB_AND_AUDIO:lapped_feature_tensor" + output_stream: "PREDICTIONS:prediction_tensor" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowInferenceCalculatorOptions]: { + batch_size: 32 + } + } +} + +node { + calculator: "TensorToVectorFloatCalculator" + input_stream: "prediction_tensor" + output_stream: "prediction_vector" +} + +node { + calculator: "TopKScoresCalculator" + input_stream: "SCORES:prediction_vector" + output_stream: "TOP_K_INDEXES:top_k_indexes" + output_stream: "TOP_K_SCORES:top_k_scores" + output_stream: "TOP_K_LABELS:top_k_labels" + node_options: { + [type.googleapis.com/mediapipe.TopKScoresCalculatorOptions]: { + top_k: 3 + label_map_path: "mediapipe/graphs/youtube8m/label_map.txt" + } + } +} + +node { + calculator: "OpenCvVideoDecoderCalculator" + input_side_packet: "INPUT_FILE_PATH:input_video_path" + output_stream: "VIDEO:input_video" + output_stream: "VIDEO_PRESTREAM:input_video_header" +} + +node { + calculator: "LabelsToRenderDataCalculator" + input_stream: "LABELS:top_k_labels" + input_stream: "SCORES:top_k_scores" + input_stream: "VIDEO_PRESTREAM:input_video_header" + output_stream: "RENDER_DATA:render_data" + node_options: { + [type.googleapis.com/mediapipe.LabelsToRenderDataCalculatorOptions]: { + color { r: 255 g: 0 b: 0 } + color { r: 0 g: 255 b: 0 } + color { r: 0 g: 0 b: 255 } + thickness: 2.0 + font_height_px: 20 + max_num_labels: 3 + location: TOP_LEFT + } + } +} + +node { + calculator: "PacketClonerCalculator" + input_stream: "render_data" + input_stream: "input_video" + output_stream: "synchronized_render_data" +} + +node { + calculator: "AnnotationOverlayCalculator" + input_stream: "IMAGE:input_video" + input_stream: "synchronized_render_data" + output_stream: "IMAGE:output_video" +} + +node { + calculator: "OpenCvVideoEncoderCalculator" + input_stream: "VIDEO:output_video" + input_stream: "VIDEO_PRESTREAM:input_video_header" + input_side_packet: "OUTPUT_FILE_PATH:output_video_path" + node_options: { + [type.googleapis.com/mediapipe.OpenCvVideoEncoderCalculatorOptions]: { + codec: "avc1" + video_format: "mp4" + } + } +} + diff --git a/mediapipe/graphs/youtube8m/yt8m_dataset_model_inference.pbtxt b/mediapipe/graphs/youtube8m/yt8m_dataset_model_inference.pbtxt new file mode 100644 index 0000000..38a0257 --- /dev/null +++ 
b/mediapipe/graphs/youtube8m/yt8m_dataset_model_inference.pbtxt @@ -0,0 +1,139 @@ +input_side_packet: "desired_segment_size" +input_side_packet: "record_index" +input_side_packet: "tfrecord_path" +output_side_packet: "yt8m_id" +output_stream: "annotation_summary" + +node { + calculator: "StringToInt32Calculator" + input_side_packet: "record_index" + output_side_packet: "record_index_int" +} + +node { + calculator: "StringToInt32Calculator" + input_side_packet: "desired_segment_size" + output_side_packet: "desired_segment_size_int" +} + +node { + calculator: "TFRecordReaderCalculator" + input_side_packet: "TFRECORD_PATH:tfrecord_path" + input_side_packet: "RECORD_INDEX:record_index_int" + output_side_packet: "SEQUENCE_EXAMPLE:yt8m_sequence_example" +} + +node { + calculator: "UnpackYt8mSequenceExampleCalculator" + input_side_packet: "YT8M_SEQUENCE_EXAMPLE:yt8m_sequence_example" + input_side_packet: "DESIRED_SEGMENT_SIZE:desired_segment_size_int" + output_side_packet: "YT8M_ID:yt8m_id" + output_side_packet: "SEGMENT_SIZE:segment_size" + output_side_packet: "LAPPED_TENSOR_BUFFER_CALCULATOR_OPTIONS:lapped_tensor_buffer_calculator_options" + output_stream: "QUANTIZED_RGB_FEATURE:quantized_rgb_feature" + output_stream: "QUANTIZED_AUDIO_FEATURE:quantized_audio_feature" +} + +node { + calculator: "DequantizeByteArrayCalculator" + input_stream: "ENCODED:quantized_rgb_feature" + output_stream: "FLOAT_VECTOR:rgb_feature_vector" + node_options: { + [type.googleapis.com/mediapipe.DequantizeByteArrayCalculatorOptions]: { + max_quantized_value: 2 + min_quantized_value: -2 + } + } +} + +node { + calculator: "DequantizeByteArrayCalculator" + input_stream: "ENCODED:quantized_audio_feature" + output_stream: "FLOAT_VECTOR:audio_feature_vector" + node_options: { + [type.googleapis.com/mediapipe.DequantizeByteArrayCalculatorOptions]: { + max_quantized_value: 2 + min_quantized_value: -2 + } + } +} + +node { + calculator: "ConcatenateFloatVectorCalculator" + input_stream: "rgb_feature_vector" + input_stream: "audio_feature_vector" + output_stream: "feature_vector" +} + +node { + calculator: "VectorFloatToTensorCalculator" + input_stream: "feature_vector" + output_stream: "feature_tensor" +} + +node { + calculator: "LappedTensorBufferCalculator" + input_stream: "feature_tensor" + input_side_packet: "CALCULATOR_OPTIONS:lapped_tensor_buffer_calculator_options" + output_stream: "lapped_feature_tensor" +} + +node { + calculator: "SidePacketToStreamCalculator" + input_side_packet: "segment_size" + output_stream: "AT_ZERO:segment_size_int_stream" +} + +node { + calculator: "VectorIntToTensorCalculator" + input_stream: "SINGLE_INT:segment_size_int_stream" + output_stream: "TENSOR_OUT:segment_size_tensor" +} + +node { + calculator: "PacketClonerCalculator" + input_stream: "segment_size_tensor" + input_stream: "lapped_feature_tensor" + output_stream: "synced_segment_size_tensor" +} + +node { + calculator: "TensorFlowSessionFromSavedModelCalculator" + output_side_packet: "SESSION:session" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowSessionFromSavedModelCalculatorOptions]: { + saved_model_path: "/tmp/mediapipe/saved_model" + } + } +} + +node: { + calculator: "TensorFlowInferenceCalculator" + input_side_packet: "SESSION:session" + input_stream: "NUM_FRAMES:synced_segment_size_tensor" + input_stream: "RGB_AND_AUDIO:lapped_feature_tensor" + output_stream: "PREDICTIONS:prediction_tensor" + node_options: { + [type.googleapis.com/mediapipe.TensorFlowInferenceCalculatorOptions]: { + batch_size: 32 + } + } +} + 
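+# The prediction tensor is converted back into a float vector below so that +# TopKScoresCalculator can rank the scores against label_map.txt.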
+node { + calculator: "TensorToVectorFloatCalculator" + input_stream: "prediction_tensor" + output_stream: "prediction_vector" +} + +node { + calculator: "TopKScoresCalculator" + input_stream: "SCORES:prediction_vector" + output_stream: "SUMMARY:annotation_summary" + node_options: { + [type.googleapis.com/mediapipe.TopKScoresCalculatorOptions]: { + top_k: 9 + label_map_path: "mediapipe/graphs/youtube8m/label_map.txt" + } + } +} diff --git a/mediapipe/modules/README.md b/mediapipe/modules/README.md new file mode 100644 index 0000000..12ec103 --- /dev/null +++ b/mediapipe/modules/README.md @@ -0,0 +1,18 @@ +# Modules + +Each module (represented as a subfolder) provides subgraphs and corresponding resources (e.g. tflite models) to perform domain-specific tasks (e.g. detect faces, detect face landmarks). + +*Modules listed below are already used in some of `mediapipe/graphs` and more graphs are being migrated to use existing and upcoming modules.* + +| Module | Description | +| :--- | :--- | +| [`face_detection`](face_detection/README.md) | Subgraphs to detect faces. | +| [`face_geometry`](face_geometry/README.md) | Subgraphs to extract face geometry. | +| [`face_landmark`](face_landmark/README.md) | Subgraphs to detect and track face landmarks. | +| [`hand_landmark`](hand_landmark/README.md) | Subgraphs to detect and track hand landmarks. | +| [`holistic_landmark`](holistic_landmark/README.md) | Subgraphs to detect and track holistic pose which consists of pose, face and hand landmarks. | +| [`iris_landmark`](iris_landmark/README.md) | Subgraphs to detect iris landmarks. | +| [`palm_detection`](palm_detection/README.md) | Subgraphs to detect palms/hands. | +| [`pose_detection`](pose_detection/README.md) | Subgraphs to detect poses. | +| [`pose_landmark`](pose_landmark/README.md) | Subgraphs to detect and track pose landmarks. | +| [`objectron`](objectron/README.md) | Subgraphs to detect and track 3D objects. | diff --git a/mediapipe/modules/face_detection/BUILD b/mediapipe/modules/face_detection/BUILD new file mode 100644 index 0000000..b1cddeb --- /dev/null +++ b/mediapipe/modules/face_detection/BUILD @@ -0,0 +1,150 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "face_detection_short_range_by_roi_cpu", + graph = "face_detection_short_range_by_roi_cpu.pbtxt", + register_as = "FaceDetectionShortRangeByRoiCpu", + deps = [ + ":face_detection_short_range_common", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/util:to_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_short_range_by_roi_gpu", + graph = "face_detection_short_range_by_roi_gpu.pbtxt", + register_as = "FaceDetectionShortRangeByRoiGpu", + deps = [ + ":face_detection_short_range_common", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/util:to_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_short_range_cpu", + graph = "face_detection_short_range_cpu.pbtxt", + register_as = "FaceDetectionShortRangeCpu", + deps = [ + ":face_detection_short_range_common", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/util:to_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_short_range_gpu", + graph = "face_detection_short_range_gpu.pbtxt", + register_as = "FaceDetectionShortRangeGpu", + deps = [ + ":face_detection_short_range_common", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/util:to_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_short_range_common", + graph = "face_detection_short_range_common.pbtxt", + register_as = "FaceDetectionShortRangeCommon", + deps = [ + "//mediapipe/calculators/tensor:tensors_to_detections_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/util:detection_projection_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_full_range_cpu", + graph = "face_detection_full_range_cpu.pbtxt", + register_as = "FaceDetectionFullRangeCpu", + deps = [ + ":face_detection_full_range_common", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/util:to_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_full_range_gpu", + graph = "face_detection_full_range_gpu.pbtxt", + register_as = "FaceDetectionFullRangeGpu", + deps = [ + ":face_detection_full_range_common", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/util:to_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_full_range_common", + graph = "face_detection_full_range_common.pbtxt", + register_as = "FaceDetectionFullRangeCommon", + deps = [ + "//mediapipe/calculators/tensor:tensors_to_detections_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/util:detection_projection_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + ], +) + 
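+# NOTE: the *_image variants below consume the multi-backend Image type +# directly and add flow_limiter_calculator for real-time throttling, so they +# do not depend on to_image_calculator.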
+mediapipe_simple_subgraph( + name = "face_detection_short_range_image", + graph = "face_detection_short_range_image.pbtxt", + register_as = "FaceDetectionShortRangeImage", + deps = [ + ":face_detection_short_range_common", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_full_range_image", + graph = "face_detection_full_range_image.pbtxt", + register_as = "FaceDetectionFullRangeImage", + deps = [ + ":face_detection_full_range_common", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + ], +) + +exports_files( + srcs = [ + "face_detection_full_range.tflite", + "face_detection_full_range_sparse.tflite", + "face_detection_short_range.tflite", + ], +) diff --git a/mediapipe/modules/face_detection/README.md b/mediapipe/modules/face_detection/README.md new file mode 100644 index 0000000..17cf27b --- /dev/null +++ b/mediapipe/modules/face_detection/README.md @@ -0,0 +1,8 @@ +# face_detection + +Subgraphs|Details +:--- | :--- +[`FaceDetectionFullRangeCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_full_range_cpu.pbtxt)| Detects faces. Works best for faces within 5 meters from the camera. (CPU input, and inference is executed on CPU.) +[`FaceDetectionFullRangeGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_full_range_gpu.pbtxt)| Detects faces. Works best for faces within 5 meters from the camera. (GPU input, and inference is executed on GPU.) +[`FaceDetectionShortRangeCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_cpu.pbtxt)| Detects faces. Works best for faces within 2 meters from the camera. (CPU input, and inference is executed on CPU.) +[`FaceDetectionShortRangeGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt)| Detects faces. Works best for faces within 2 meters from the camera. (GPU input, and inference is executed on GPU.) diff --git a/mediapipe/modules/face_detection/face_detection_full_range.tflite b/mediapipe/modules/face_detection/face_detection_full_range.tflite new file mode 100755 index 0000000..98c5c16 Binary files /dev/null and b/mediapipe/modules/face_detection/face_detection_full_range.tflite differ diff --git a/mediapipe/modules/face_detection/face_detection_full_range_common.pbtxt b/mediapipe/modules/face_detection/face_detection_full_range_common.pbtxt new file mode 100644 index 0000000..937e8be --- /dev/null +++ b/mediapipe/modules/face_detection/face_detection_full_range_common.pbtxt @@ -0,0 +1,102 @@ +# MediaPipe graph performing common processing to detect faces using +# face_detection_full_range_sparse.tflite model, currently consisting of tensor +# post processing. +# +# EXAMPLE: +# node { +# calculator: "FaceDetectionFullRangeCommon" +# input_stream: "TENSORS:detection_tensors" +# input_stream: "MATRIX:transform_matrix" +# output_stream: "DETECTIONS:detections" +# } + +type: "FaceDetectionFullRangeCommon" + +# Detection tensors.
(std::vector) +input_stream: "TENSORS:detection_tensors" + +# A 4x4 row-major-order matrix that maps a point represented in the detection +# tensors to a desired coordinate system, e.g., in the original input image +# before scaling/cropping. (std::array) +input_stream: "MATRIX:transform_matrix" + +# Detected faces. (std::vector) +# NOTE: there will not be an output packet in the DETECTIONS stream for this +# particular timestamp if none of faces detected. However, the MediaPipe +# framework will internally inform the downstream calculators of the absence of +# this packet so that they don't wait for it unnecessarily. +output_stream: "DETECTIONS:detections" + +# Generates a single side packet containing a vector of SSD anchors based on +# the specification in the options. +node { + calculator: "SsdAnchorsCalculator" + output_side_packet: "anchors" + options: { + [mediapipe.SsdAnchorsCalculatorOptions.ext] { + num_layers: 1 + min_scale: 0.1484375 + max_scale: 0.75 + input_size_height: 192 + input_size_width: 192 + anchor_offset_x: 0.5 + anchor_offset_y: 0.5 + strides: 4 + aspect_ratios: 1.0 + fixed_anchor_size: true + interpolated_scale_aspect_ratio: 0.0 + } + } +} + +# Decodes the detection tensors generated by the TensorFlow Lite model, based on +# the SSD anchors and the specification in the options, into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "TensorsToDetectionsCalculator" + input_stream: "TENSORS:detection_tensors" + input_side_packet: "ANCHORS:anchors" + output_stream: "DETECTIONS:unfiltered_detections" + options: { + [mediapipe.TensorsToDetectionsCalculatorOptions.ext] { + num_classes: 1 + num_boxes: 2304 + num_coords: 16 + box_coord_offset: 0 + keypoint_coord_offset: 4 + num_keypoints: 6 + num_values_per_keypoint: 2 + sigmoid_score: true + score_clipping_thresh: 100.0 + reverse_output_order: true + x_scale: 192.0 + y_scale: 192.0 + h_scale: 192.0 + w_scale: 192.0 + min_score_thresh: 0.6 + } + } +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "unfiltered_detections" + output_stream: "filtered_detections" + options: { + [mediapipe.NonMaxSuppressionCalculatorOptions.ext] { + min_suppression_threshold: 0.3 + overlap_type: INTERSECTION_OVER_UNION + algorithm: WEIGHTED + } + } +} + +# Projects the detections from input tensor to the corresponding locations on +# the original image (input to the graph). +node { + calculator: "DetectionProjectionCalculator" + input_stream: "DETECTIONS:filtered_detections" + input_stream: "PROJECTION_MATRIX:transform_matrix" + output_stream: "DETECTIONS:detections" +} diff --git a/mediapipe/modules/face_detection/face_detection_full_range_cpu.pbtxt b/mediapipe/modules/face_detection/face_detection_full_range_cpu.pbtxt new file mode 100644 index 0000000..2350401 --- /dev/null +++ b/mediapipe/modules/face_detection/face_detection_full_range_cpu.pbtxt @@ -0,0 +1,80 @@ +# MediaPipe graph to detect faces. (CPU input, and inference is executed on +# CPU.) +# +# It is required that "face_detection_full_range_sparse.tflite" is available at +# "mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite" +# path during execution. +# +# EXAMPLE: +# node { +# calculator: "FaceDetectionFullRangeCpu" +# input_stream: "IMAGE:image" +# output_stream: "DETECTIONS:face_detections" +# } + +type: "FaceDetectionFullRangeCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:image" + +# Detected faces. 
(std::vector) +# NOTE: there will not be an output packet in the DETECTIONS stream for this +# particular timestamp if none of faces detected. However, the MediaPipe +# framework will internally inform the downstream calculators of the absence of +# this packet so that they don't wait for it unnecessarily. +output_stream: "DETECTIONS:detections" + +# Converts the input CPU image (ImageFrame) to the multi-backend image type +# (Image). +node: { + calculator: "ToImageCalculator" + input_stream: "IMAGE_CPU:image" + output_stream: "IMAGE:multi_backend_image" +} + +# Transforms the input image into a 192x192 tensor while keeping the aspect +# ratio (what is expected by the corresponding face detection model), resulting +# in potential letterboxing in the transformed image. +node: { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE:multi_backend_image" + output_stream: "TENSORS:input_tensors" + output_stream: "MATRIX:transform_matrix" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 192 + output_tensor_height: 192 + keep_aspect_ratio: true + output_tensor_float_range { + min: -1.0 + max: 1.0 + } + border_mode: BORDER_ZERO + } + } +} + +# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:input_tensors" + output_stream: "TENSORS:detection_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + model_path: "mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite" + delegate { + xnnpack {} + } + } + } +} + +# Performs tensor post processing to generate face detections. +node { + calculator: "FaceDetectionFullRangeCommon" + input_stream: "TENSORS:detection_tensors" + input_stream: "MATRIX:transform_matrix" + output_stream: "DETECTIONS:detections" +} diff --git a/mediapipe/modules/face_detection/face_detection_full_range_gpu.pbtxt b/mediapipe/modules/face_detection/face_detection_full_range_gpu.pbtxt new file mode 100644 index 0000000..703b717 --- /dev/null +++ b/mediapipe/modules/face_detection/face_detection_full_range_gpu.pbtxt @@ -0,0 +1,80 @@ +# MediaPipe graph to detect faces. (GPU input, and inference is executed on +# GPU.) +# +# It is required that "face_detection_full_range_sparse.tflite" is available at +# "mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite" +# path during execution. +# +# EXAMPLE: +# node { +# calculator: "FaceDetectionFullRangeGpu" +# input_stream: "IMAGE:image" +# output_stream: "DETECTIONS:face_detections" +# } + +type: "FaceDetectionFullRangeGpu" + +# GPU image. (GpuBuffer) +input_stream: "IMAGE:image" + +# Detected faces. (std::vector) +# NOTE: there will not be an output packet in the DETECTIONS stream for this +# particular timestamp if none of faces detected. However, the MediaPipe +# framework will internally inform the downstream calculators of the absence of +# this packet so that they don't wait for it unnecessarily. +output_stream: "DETECTIONS:detections" + +# Converts the input GPU image (GpuBuffer) to the multi-backend image type +# (Image). 
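+# Wrapping into the shared Image type lets this GPU graph reuse the same +# ImageToTensorCalculator and post-processing nodes as the CPU variant.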
+node: { + calculator: "ToImageCalculator" + input_stream: "IMAGE_GPU:image" + output_stream: "IMAGE:multi_backend_image" +} + +# Transforms the input image into a 192x192 tensor while keeping the aspect +# ratio (what is expected by the corresponding face detection model), resulting +# in potential letterboxing in the transformed image. +node: { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE:multi_backend_image" + output_stream: "TENSORS:input_tensors" + output_stream: "MATRIX:transform_matrix" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 192 + output_tensor_height: 192 + keep_aspect_ratio: true + output_tensor_float_range { + min: -1.0 + max: 1.0 + } + border_mode: BORDER_ZERO + gpu_origin: TOP_LEFT + } + } +} + +# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:input_tensors" + output_stream: "TENSORS:detection_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + model_path: "mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite" + # + delegate: { gpu { use_advanced_gpu_api: true } } + } + } +} + +# Performs tensor post processing to generate face detections. +node { + calculator: "FaceDetectionFullRangeCommon" + input_stream: "TENSORS:detection_tensors" + input_stream: "MATRIX:transform_matrix" + output_stream: "DETECTIONS:detections" +} diff --git a/mediapipe/modules/face_detection/face_detection_full_range_image.pbtxt b/mediapipe/modules/face_detection/face_detection_full_range_image.pbtxt new file mode 100644 index 0000000..4e0bc0b --- /dev/null +++ b/mediapipe/modules/face_detection/face_detection_full_range_image.pbtxt @@ -0,0 +1,86 @@ +# MediaPipe graph to detect faces. (GPU/CPU input, and inference is executed on +# GPU.) +# +# It is required that "face_detection_full_range_sparse.tflite" is available at +# "mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite" +# path during execution. + +type: "FaceDetectionFullRangeImage" + +# Image. (Image) +input_stream: "IMAGE:image" + +# The throttled input image. (Image) +output_stream: "IMAGE:throttled_image" +# Detected faces. (std::vector) +# NOTE: there will not be an output packet in the DETECTIONS stream for this +# particular timestamp if none of faces detected. However, the MediaPipe +# framework will internally inform the downstream calculators of the absence of +# this packet so that they don't wait for it unnecessarily. +output_stream: "DETECTIONS:detections" + +node { + calculator: "FlowLimiterCalculator" + input_stream: "image" + input_stream: "FINISHED:detections" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_image" + options: { + [mediapipe.FlowLimiterCalculatorOptions.ext] { + max_in_flight: 1 + max_in_queue: 1 + } + } +} + +# Transforms the input image into a 192x192 tensor while keeping the aspect +# ratio (what is expected by the corresponding face detection model), resulting +# in potential letterboxing in the transformed image.
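+# The MATRIX output below captures that transform so the common subgraph can +# project detections back into the original image's coordinate space.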
+node: { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE:throttled_image" + output_stream: "TENSORS:input_tensors" + output_stream: "MATRIX:transform_matrix" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 192 + output_tensor_height: 192 + keep_aspect_ratio: true + output_tensor_float_range { + min: -1.0 + max: 1.0 + } + border_mode: BORDER_ZERO + gpu_origin: CONVENTIONAL + } + } +} + +# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +# TODO: Use GraphOptions to modify the delegate field to be +# `delegate { xnnpack {} }` for the CPU only use cases. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:input_tensors" + output_stream: "TENSORS:detection_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + model_path: "mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite" + # + delegate: { gpu { use_advanced_gpu_api: true } } + } + } +} + +# Performs tensor post processing to generate face detections. +node { + calculator: "FaceDetectionFullRangeCommon" + input_stream: "TENSORS:detection_tensors" + input_stream: "MATRIX:transform_matrix" + output_stream: "DETECTIONS:detections" +} diff --git a/mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite b/mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite new file mode 100755 index 0000000..9575d8c Binary files /dev/null and b/mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite differ diff --git a/mediapipe/modules/face_detection/face_detection_short_range.tflite b/mediapipe/modules/face_detection/face_detection_short_range.tflite new file mode 100755 index 0000000..659bce8 Binary files /dev/null and b/mediapipe/modules/face_detection/face_detection_short_range.tflite differ diff --git a/mediapipe/modules/face_detection/face_detection_short_range_by_roi_cpu.pbtxt b/mediapipe/modules/face_detection/face_detection_short_range_by_roi_cpu.pbtxt new file mode 100644 index 0000000..b3adfeb --- /dev/null +++ b/mediapipe/modules/face_detection/face_detection_short_range_by_roi_cpu.pbtxt @@ -0,0 +1,83 @@ +# MediaPipe graph to detect faces. (CPU input, and inference is executed on +# CPU.) +# +# It is required that "face_detection_short_range.tflite" is available at +# "mediapipe/modules/face_detection/face_detection_short_range.tflite" +# path during execution. +# +# EXAMPLE: +# node { +# calculator: "FaceDetectionShortRangeByRoiCpu" +# input_stream: "IMAGE:image" +# input_stream: "ROI:roi" +# output_stream: "DETECTIONS:face_detections" +# } + +type: "FaceDetectionShortRangeByRoiCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:image" + +# ROI (region of interest) within the given image where faces should be +# detected. (NormalizedRect) +input_stream: "ROI:roi" + +# Detected faces. (std::vector) +# NOTE: there will not be an output packet in the DETECTIONS stream for this +# particular timestamp if none of faces detected. However, the MediaPipe +# framework will internally inform the downstream calculators of the absence of +# this packet so that they don't wait for it unnecessarily. +output_stream: "DETECTIONS:detections" + +# Converts the input CPU image (ImageFrame) to the multi-backend image type +# (Image). 
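+# As in the full-range CPU graph, this wrapper keeps the downstream nodes +# backend-agnostic.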
+node: {
+  calculator: "ToImageCalculator"
+  input_stream: "IMAGE_CPU:image"
+  output_stream: "IMAGE:multi_backend_image"
+}
+
+# Transforms specified region of image into 128x128 tensor keeping aspect ratio
+# (padding tensor if needed).
+node {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE:multi_backend_image"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "TENSORS:input_tensors"
+  output_stream: "MATRIX:transform_matrix"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 128
+      output_tensor_height: 128
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: -1.0
+        max: 1.0
+      }
+      border_mode: BORDER_ZERO
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "InferenceCalculator"
+  input_stream: "TENSORS:input_tensors"
+  output_stream: "TENSORS:detection_tensors"
+  options: {
+    [mediapipe.InferenceCalculatorOptions.ext] {
+      model_path: "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+      delegate { xnnpack {} }
+    }
+  }
+}
+
+# Performs tensor post processing to generate face detections.
+node {
+  calculator: "FaceDetectionShortRangeCommon"
+  input_stream: "TENSORS:detection_tensors"
+  input_stream: "MATRIX:transform_matrix"
+  output_stream: "DETECTIONS:detections"
+}
diff --git a/mediapipe/modules/face_detection/face_detection_short_range_by_roi_gpu.pbtxt b/mediapipe/modules/face_detection/face_detection_short_range_by_roi_gpu.pbtxt
new file mode 100644
index 0000000..1bd08e9
--- /dev/null
+++ b/mediapipe/modules/face_detection/face_detection_short_range_by_roi_gpu.pbtxt
@@ -0,0 +1,83 @@
+# MediaPipe graph to detect faces. (GPU input, and inference is executed on
+# GPU.)
+#
+# It is required that "face_detection_short_range.tflite" is available at
+# "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+# path during execution.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "FaceDetectionShortRangeByRoiGpu"
+#     input_stream: "IMAGE:image"
+#     input_stream: "ROI:roi"
+#     output_stream: "DETECTIONS:face_detections"
+#   }
+
+type: "FaceDetectionShortRangeByRoiGpu"
+
+# GPU image. (GpuBuffer)
+input_stream: "IMAGE:image"
+
+# ROI (region of interest) within the given image where faces should be
+# detected. (NormalizedRect)
+input_stream: "ROI:roi"
+
+# Detected faces. (std::vector<Detection>)
+# NOTE: there will not be an output packet in the DETECTIONS stream for this
+# particular timestamp if no faces are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "DETECTIONS:detections"
+
+# Converts the input GPU image (GpuBuffer) to the multi-backend image type
+# (Image).
+node: {
+  calculator: "ToImageCalculator"
+  input_stream: "IMAGE_GPU:image"
+  output_stream: "IMAGE:multi_backend_image"
+}
+
+# Transforms specified region of image into 128x128 tensor keeping aspect ratio
+# (padding tensor if needed).
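The region in question arrives on the NORM_RECT input of the node below as a NormalizedRect: a center and size expressed as fractions of the image, not pixels. A hedged sketch of that convention in Rust; the field names mirror the spirit of the proto but are not taken from it, and rotation is omitted for brevity:

```rust
/// Minimal stand-in for a normalized ROI rectangle (rotation omitted).
struct NormalizedRect {
    x_center: f32, // in [0, 1], fraction of image width
    y_center: f32, // in [0, 1], fraction of image height
    width: f32,
    height: f32,
}

/// Pixel-space rectangle: left/top corner plus size.
struct PixelRect { x: i32, y: i32, w: i32, h: i32 }

fn to_pixels(r: &NormalizedRect, img_w: u32, img_h: u32) -> PixelRect {
    let w = r.width * img_w as f32;
    let h = r.height * img_h as f32;
    PixelRect {
        x: (r.x_center * img_w as f32 - w / 2.0).round() as i32,
        y: (r.y_center * img_h as f32 - h / 2.0).round() as i32,
        w: w.round() as i32,
        h: h.round() as i32,
    }
}

fn main() {
    // A centered, half-size ROI on a 640x480 frame:
    let roi = NormalizedRect { x_center: 0.5, y_center: 0.5, width: 0.5, height: 0.5 };
    let px = to_pixels(&roi, 640, 480);
    assert_eq!((px.x, px.y, px.w, px.h), (160, 120, 320, 240));
}
```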
+node {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE:multi_backend_image"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "TENSORS:input_tensors"
+  output_stream: "MATRIX:transform_matrix"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 128
+      output_tensor_height: 128
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: -1.0
+        max: 1.0
+      }
+      border_mode: BORDER_ZERO
+      gpu_origin: TOP_LEFT
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "InferenceCalculator"
+  input_stream: "TENSORS:input_tensors"
+  output_stream: "TENSORS:detection_tensors"
+  options: {
+    [mediapipe.InferenceCalculatorOptions.ext] {
+      model_path: "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+    }
+  }
+}
+
+# Performs tensor post processing to generate face detections.
+node {
+  calculator: "FaceDetectionShortRangeCommon"
+  input_stream: "TENSORS:detection_tensors"
+  input_stream: "MATRIX:transform_matrix"
+  output_stream: "DETECTIONS:detections"
+}
diff --git a/mediapipe/modules/face_detection/face_detection_short_range_common.pbtxt b/mediapipe/modules/face_detection/face_detection_short_range_common.pbtxt
new file mode 100644
index 0000000..4a6a54f
--- /dev/null
+++ b/mediapipe/modules/face_detection/face_detection_short_range_common.pbtxt
@@ -0,0 +1,103 @@
+# MediaPipe graph performing common processing to detect faces, currently
+# consisting of tensor post processing.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "FaceDetectionShortRangeCommon"
+#     input_stream: "TENSORS:detection_tensors"
+#     input_stream: "MATRIX:transform_matrix"
+#     output_stream: "DETECTIONS:detections"
+#   }
+
+type: "FaceDetectionShortRangeCommon"
+
+# Detection tensors. (std::vector<Tensor>)
+input_stream: "TENSORS:detection_tensors"
+
+# A 4x4 row-major-order matrix that maps a point represented in the detection
+# tensors to a desired coordinate system, e.g., in the original input image
+# before scaling/cropping. (std::array<float, 16>)
+input_stream: "MATRIX:transform_matrix"
+
+# Detected faces. (std::vector<Detection>)
+# NOTE: there will not be an output packet in the DETECTIONS stream for this
+# particular timestamp if no faces are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "DETECTIONS:detections"
+
+# Generates a single side packet containing a vector of SSD anchors based on
+# the specification in the options.
+node {
+  calculator: "SsdAnchorsCalculator"
+  output_side_packet: "anchors"
+  options: {
+    [mediapipe.SsdAnchorsCalculatorOptions.ext] {
+      num_layers: 4
+      min_scale: 0.1484375
+      max_scale: 0.75
+      input_size_height: 128
+      input_size_width: 128
+      anchor_offset_x: 0.5
+      anchor_offset_y: 0.5
+      strides: 8
+      strides: 16
+      strides: 16
+      strides: 16
+      aspect_ratios: 1.0
+      fixed_anchor_size: true
+    }
+  }
+}
+
+# Decodes the detection tensors generated by the TensorFlow Lite model, based on
+# the SSD anchors and the specification in the options, into a vector of
+# detections. Each detection describes a detected object.
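Two parts of this decode step can be checked by hand. First, the SsdAnchorsCalculator spec above (128x128 input, strides [8, 16, 16, 16], a single 1.0 aspect ratio plus, assuming the calculator's default interpolated second scale, two anchors per layer-cell) yields exactly the 896 boxes the decoder below expects. Second, per the options that follow, scores pass through a clipped sigmoid and box offsets are divided by the 128.0 scales and applied about the anchor center. A back-of-envelope sketch in Rust, not the calculator's actual code:

```rust
/// Sanity-check the anchor count and decode one box/score, per the options
/// in this graph. Assumes fixed_anchor_size (anchor width/height treated as
/// 1.0) and 2 anchors per layer-cell; illustrative only.
fn sigmoid_score(raw: f32, clip: f32) -> f32 {
    1.0 / (1.0 + (-raw.clamp(-clip, clip)).exp()) // score_clipping_thresh: 100.0
}

/// Decode (cx, cy, w, h) in [0, 1] coordinates of the 128x128 input.
/// reverse_output_order: true => the raw box is ordered [x, y, w, h].
fn decode_box(raw: [f32; 4], anchor: (f32, f32), scale: f32) -> [f32; 4] {
    [
        raw[0] / scale + anchor.0,
        raw[1] / scale + anchor.1,
        raw[2] / scale,
        raw[3] / scale,
    ]
}

fn main() {
    // Anchor count: the stride-8 layer has a 16x16 grid; the three stride-16
    // layers stack on one 8x8 grid; 2 anchors per layer-cell.
    let input = 128u32;
    let total: u32 = [8u32, 16, 16, 16]
        .iter()
        .map(|s| (input / s) * (input / s) * 2)
        .sum();
    assert_eq!(total, 896); // matches num_boxes below: 512 + 3 * 128

    // Score: a huge raw logit is clipped to 100 before the sigmoid.
    assert!(sigmoid_score(1e6, 100.0) > 0.999);

    // Box: offsets are scaled by 1/128 and centered on the anchor.
    let b = decode_box([6.4, -6.4, 12.8, 12.8], (0.5, 0.5), 128.0);
    for (got, want) in b.iter().zip([0.55, 0.45, 0.1, 0.1]) {
        assert!((got - want).abs() < 1e-6);
    }
}
```

The min_score_thresh of 0.5 is then applied to the sigmoid output, and only the surviving boxes move on to non-max suppression.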
+node {
+  calculator: "TensorsToDetectionsCalculator"
+  input_stream: "TENSORS:detection_tensors"
+  input_side_packet: "ANCHORS:anchors"
+  output_stream: "DETECTIONS:unfiltered_detections"
+  options: {
+    [mediapipe.TensorsToDetectionsCalculatorOptions.ext] {
+      num_classes: 1
+      num_boxes: 896
+      num_coords: 16
+      box_coord_offset: 0
+      keypoint_coord_offset: 4
+      num_keypoints: 6
+      num_values_per_keypoint: 2
+      sigmoid_score: true
+      score_clipping_thresh: 100.0
+      reverse_output_order: true
+      x_scale: 128.0
+      y_scale: 128.0
+      h_scale: 128.0
+      w_scale: 128.0
+      min_score_thresh: 0.5
+    }
+  }
+}
+
+# Performs non-max suppression to remove excessive detections.
+node {
+  calculator: "NonMaxSuppressionCalculator"
+  input_stream: "unfiltered_detections"
+  output_stream: "filtered_detections"
+  options: {
+    [mediapipe.NonMaxSuppressionCalculatorOptions.ext] {
+      min_suppression_threshold: 0.3
+      overlap_type: INTERSECTION_OVER_UNION
+      algorithm: WEIGHTED
+    }
+  }
+}
+
+# Projects the detections from input tensor to the corresponding locations on
+# the original image (input to the graph).
+node {
+  calculator: "DetectionProjectionCalculator"
+  input_stream: "DETECTIONS:filtered_detections"
+  input_stream: "PROJECTION_MATRIX:transform_matrix"
+  output_stream: "DETECTIONS:detections"
+}
diff --git a/mediapipe/modules/face_detection/face_detection_short_range_cpu.pbtxt b/mediapipe/modules/face_detection/face_detection_short_range_cpu.pbtxt
new file mode 100644
index 0000000..0db2420
--- /dev/null
+++ b/mediapipe/modules/face_detection/face_detection_short_range_cpu.pbtxt
@@ -0,0 +1,78 @@
+# MediaPipe graph to detect faces. (CPU input, and inference is executed on
+# CPU.)
+#
+# It is required that "face_detection_short_range.tflite" is available at
+# "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+# path during execution.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "FaceDetectionShortRangeCpu"
+#     input_stream: "IMAGE:image"
+#     output_stream: "DETECTIONS:face_detections"
+#   }
+
+type: "FaceDetectionShortRangeCpu"
+
+# CPU image. (ImageFrame)
+input_stream: "IMAGE:image"
+
+# Detected faces. (std::vector<Detection>)
+# NOTE: there will not be an output packet in the DETECTIONS stream for this
+# particular timestamp if no faces are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "DETECTIONS:detections"
+
+# Converts the input CPU image (ImageFrame) to the multi-backend image type
+# (Image).
+node: {
+  calculator: "ToImageCalculator"
+  input_stream: "IMAGE_CPU:image"
+  output_stream: "IMAGE:multi_backend_image"
+}
+
+# Transforms the input image into a 128x128 tensor while keeping the aspect
+# ratio (what is expected by the corresponding face detection model), resulting
+# in potential letterboxing in the transformed image.
+node: {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE:multi_backend_image"
+  output_stream: "TENSORS:input_tensors"
+  output_stream: "MATRIX:transform_matrix"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 128
+      output_tensor_height: 128
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: -1.0
+        max: 1.0
+      }
+      border_mode: BORDER_ZERO
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
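After inference, the DetectionProjectionCalculator shown earlier applies the 4x4 row-major matrix from the MATRIX stream to map box coordinates from tensor space back to the original image. A hedged sketch of that idea in Rust, continuing the 640x480 letterbox example from above; the matrix values here are illustrative, not the calculator's output:

```rust
/// Apply a 4x4 row-major transform (as carried on the MATRIX stream) to a
/// 2D point. A sketch of the projection idea, not the MediaPipe calculator.
fn project(m: &[f32; 16], x: f32, y: f32) -> (f32, f32) {
    // Row-major 4x4 applied to (x, y, 0, 1); z is irrelevant for 2D boxes.
    (
        m[0] * x + m[1] * y + m[3],
        m[4] * x + m[5] * y + m[7],
    )
}

fn main() {
    // Undo the 640x480 -> square letterbox: the content occupies 144 of the
    // tensor's rows (480 * 0.3), offset by 24 rows of padding.
    let sy = 192.0 / 144.0; // scale y back up
    let ty = -24.0 / 144.0; // remove the top padding
    let m: [f32; 16] = [
        1.0, 0.0, 0.0, 0.0,
        0.0, sy,  0.0, ty,
        0.0, 0.0, 1.0, 0.0,
        0.0, 0.0, 0.0, 1.0,
    ];
    // The tensor's center maps back to the image's center.
    let (x, y) = project(&m, 0.5, 0.5);
    assert_eq!(x, 0.5);
    assert!((y - 0.5).abs() < 1e-6);
}
```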
+node {
+  calculator: "InferenceCalculator"
+  input_stream: "TENSORS:input_tensors"
+  output_stream: "TENSORS:detection_tensors"
+  options: {
+    [mediapipe.InferenceCalculatorOptions.ext] {
+      model_path: "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+      delegate { xnnpack {} }
+    }
+  }
+}
+
+# Performs tensor post processing to generate face detections.
+node {
+  calculator: "FaceDetectionShortRangeCommon"
+  input_stream: "TENSORS:detection_tensors"
+  input_stream: "MATRIX:transform_matrix"
+  output_stream: "DETECTIONS:detections"
+}
diff --git a/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt b/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt
new file mode 100644
index 0000000..d30644b
--- /dev/null
+++ b/mediapipe/modules/face_detection/face_detection_short_range_gpu.pbtxt
@@ -0,0 +1,78 @@
+# MediaPipe graph to detect faces. (GPU input, and inference is executed on
+# GPU.)
+#
+# It is required that "face_detection_short_range.tflite" is available at
+# "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+# path during execution.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "FaceDetectionShortRangeGpu"
+#     input_stream: "IMAGE:image"
+#     output_stream: "DETECTIONS:face_detections"
+#   }
+
+type: "FaceDetectionShortRangeGpu"
+
+# GPU image. (GpuBuffer)
+input_stream: "IMAGE:image"
+
+# Detected faces. (std::vector<Detection>)
+# NOTE: there will not be an output packet in the DETECTIONS stream for this
+# particular timestamp if no faces are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "DETECTIONS:detections"
+
+# Converts the input GPU image (GpuBuffer) to the multi-backend image type
+# (Image).
+node: {
+  calculator: "ToImageCalculator"
+  input_stream: "IMAGE_GPU:image"
+  output_stream: "IMAGE:multi_backend_image"
+}
+
+# Transforms the input image into a 128x128 tensor while keeping the aspect
+# ratio (what is expected by the corresponding face detection model), resulting
+# in potential letterboxing in the transformed image.
+node: {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE:multi_backend_image"
+  output_stream: "TENSORS:input_tensors"
+  output_stream: "MATRIX:transform_matrix"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 128
+      output_tensor_height: 128
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: -1.0
+        max: 1.0
+      }
+      border_mode: BORDER_ZERO
+      gpu_origin: TOP_LEFT
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "InferenceCalculator"
+  input_stream: "TENSORS:input_tensors"
+  output_stream: "TENSORS:detection_tensors"
+  options: {
+    [mediapipe.InferenceCalculatorOptions.ext] {
+      model_path: "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+    }
+  }
+}
+
+# Performs tensor post processing to generate face detections.
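Part of that common post processing is the NonMaxSuppressionCalculator configured earlier: overlap is measured as intersection-over-union, and with `algorithm: WEIGHTED` the candidates overlapping the top-scoring box above the 0.3 threshold are blended into it by score weighting rather than simply dropped. A simplified reading of that step in Rust, not the calculator's code:

```rust
/// Axis-aligned box as (x_min, y_min, x_max, y_max).
type Box4 = (f32, f32, f32, f32);

/// Intersection-over-union, the overlap_type used in this graph.
fn iou(a: Box4, b: Box4) -> f32 {
    let ix = (a.2.min(b.2) - a.0.max(b.0)).max(0.0);
    let iy = (a.3.min(b.3) - a.1.max(b.1)).max(0.0);
    let inter = ix * iy;
    let area = |r: Box4| (r.2 - r.0) * (r.3 - r.1);
    inter / (area(a) + area(b) - inter)
}

/// One WEIGHTED suppression step, loosely: boxes overlapping the current
/// top-scoring candidate above the threshold are merged into it by
/// score-weighted averaging. Assumes a non-empty candidate list.
fn weighted_merge(cands: &[(Box4, f32)], thresh: f32) -> (Box4, f32) {
    let top = cands.iter().cloned().fold(cands[0], |m, c| if c.1 > m.1 { c } else { m });
    let mut acc = (0.0, 0.0, 0.0, 0.0);
    let mut wsum = 0.0;
    for &(b, s) in cands {
        if iou(b, top.0) >= thresh {
            acc = (acc.0 + b.0 * s, acc.1 + b.1 * s, acc.2 + b.2 * s, acc.3 + b.3 * s);
            wsum += s;
        }
    }
    ((acc.0 / wsum, acc.1 / wsum, acc.2 / wsum, acc.3 / wsum), top.1)
}

fn main() {
    let a = (0.0, 0.0, 1.0, 1.0);
    let b = (0.5, 0.0, 1.5, 1.0); // IoU with `a` is 1/3
    assert!((iou(a, b) - 1.0 / 3.0).abs() < 1e-6);
    let (merged, score) = weighted_merge(&[(a, 0.9), (b, 0.6)], 0.3);
    println!("merged box {:?} with score {}", merged, score);
}
```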
+node {
+  calculator: "FaceDetectionShortRangeCommon"
+  input_stream: "TENSORS:detection_tensors"
+  input_stream: "MATRIX:transform_matrix"
+  output_stream: "DETECTIONS:detections"
+}
diff --git a/mediapipe/modules/face_detection/face_detection_short_range_image.pbtxt b/mediapipe/modules/face_detection/face_detection_short_range_image.pbtxt
new file mode 100644
index 0000000..a259041
--- /dev/null
+++ b/mediapipe/modules/face_detection/face_detection_short_range_image.pbtxt
@@ -0,0 +1,94 @@
+# MediaPipe graph to detect faces. (GPU/CPU input, and inference is executed on
+# GPU.)
+#
+# It is required that "face_detection_short_range.tflite" is available at
+# "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+# path during execution.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "FaceDetectionShortRangeImage"
+#     input_stream: "IMAGE:image"
+#     output_stream: "DETECTIONS:face_detections"
+#   }
+
+type: "FaceDetectionShortRangeImage"
+
+# Image. (Image)
+input_stream: "IMAGE:image"
+
+# The throttled input image. (Image)
+output_stream: "IMAGE:throttled_image"
+# Detected faces. (std::vector<Detection>)
+# NOTE: there will not be an output packet in the DETECTIONS stream for this
+# particular timestamp if no faces are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "DETECTIONS:detections"
+
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "image"
+  input_stream: "FINISHED:detections"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_image"
+  options: {
+    [mediapipe.FlowLimiterCalculatorOptions.ext] {
+      max_in_flight: 1
+      max_in_queue: 1
+    }
+  }
+}
+
+# Transforms the input image into a 128x128 tensor while keeping the aspect
+# ratio (what is expected by the corresponding face detection model), resulting
+# in potential letterboxing in the transformed image.
+node: {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE:throttled_image"
+  output_stream: "TENSORS:input_tensors"
+  output_stream: "MATRIX:transform_matrix"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 128
+      output_tensor_height: 128
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: -1.0
+        max: 1.0
+      }
+      border_mode: BORDER_ZERO
+      gpu_origin: CONVENTIONAL
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+# TODO: Use GraphOptions to modify the delegate field to be
+# `delegate { xnnpack {} }` for the CPU only use cases.
+node {
+  calculator: "InferenceCalculator"
+  input_stream: "TENSORS:input_tensors"
+  output_stream: "TENSORS:detection_tensors"
+  options: {
+    [mediapipe.InferenceCalculatorOptions.ext] {
+      model_path: "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+
+      #
+      delegate: { gpu { use_advanced_gpu_api: true } }
+    }
+  }
+}
+
+# Performs tensor post processing to generate face detections.
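The piece that distinguishes this `*_image` graph is the FlowLimiterCalculator above: the FINISHED back edge reports when a detection result actually left the graph, and with `max_in_flight: 1` and `max_in_queue: 1` a slow detector drops stale camera frames instead of accumulating latency. A single-slot analogue in Rust, illustrating the shape of the mechanism rather than the calculator's logic:

```rust
/// Single-in-flight frame limiter: the shape of what FlowLimiterCalculator
/// does with max_in_flight: 1 / max_in_queue: 1 (illustrative, simplified).
struct FlowLimiter {
    in_flight: usize,
    queued: Option<u64>, // at most one queued frame (its timestamp)
    max_in_flight: usize,
}

impl FlowLimiter {
    /// Offer a new frame; returns the frame to run now, if any.
    fn offer(&mut self, ts: u64) -> Option<u64> {
        if self.in_flight < self.max_in_flight {
            self.in_flight += 1;
            Some(ts)
        } else {
            self.queued = Some(ts); // the newest frame replaces an older queued one
            None
        }
    }

    /// The FINISHED back edge: a result came out; admit the queued frame.
    fn finished(&mut self) -> Option<u64> {
        self.in_flight -= 1;
        self.queued.take().and_then(|ts| self.offer(ts))
    }
}

fn main() {
    let mut fl = FlowLimiter { in_flight: 0, queued: None, max_in_flight: 1 };
    assert_eq!(fl.offer(1), Some(1));   // admitted immediately
    assert_eq!(fl.offer(2), None);      // queued behind the in-flight frame
    assert_eq!(fl.offer(3), None);      // frame 2 is dropped, 3 is queued
    assert_eq!(fl.finished(), Some(3)); // the back edge admits the newest frame
}
```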
+node { + calculator: "FaceDetectionShortRangeCommon" + input_stream: "TENSORS:detection_tensors" + input_stream: "MATRIX:transform_matrix" + output_stream: "DETECTIONS:detections" +} diff --git a/mediapipe/modules/face_geometry/BUILD b/mediapipe/modules/face_geometry/BUILD new file mode 100644 index 0000000..c1f9967 --- /dev/null +++ b/mediapipe/modules/face_geometry/BUILD @@ -0,0 +1,137 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") +load("//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph") + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "face_geometry", + graph = "face_geometry.pbtxt", + register_as = "FaceGeometry", + deps = [ + ":geometry_pipeline_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_geometry_from_detection", + graph = "face_geometry_from_detection.pbtxt", + register_as = "FaceGeometryFromDetection", + deps = [ + ":geometry_pipeline_calculator", + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + "//mediapipe/calculators/util:detection_to_landmarks_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_geometry_from_landmarks", + graph = "face_geometry_from_landmarks.pbtxt", + register_as = "FaceGeometryFromLandmarks", + deps = [ + ":geometry_pipeline_calculator", + ], +) + +mediapipe_proto_library( + name = "effect_renderer_calculator_proto", + srcs = ["effect_renderer_calculator.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + ], +) + +cc_library( + name = "effect_renderer_calculator", + srcs = ["effect_renderer_calculator.cc"], + deps = [ + ":effect_renderer_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:image_frame", + "//mediapipe/framework/formats:image_frame_opencv", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/framework/port:opencv_imgcodecs", + "//mediapipe/framework/port:opencv_imgproc", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/framework/port:statusor", + "//mediapipe/gpu:gl_calculator_helper", + "//mediapipe/gpu:gpu_buffer", + "//mediapipe/modules/face_geometry/libs:effect_renderer", + "//mediapipe/modules/face_geometry/libs:validation_utils", + "//mediapipe/modules/face_geometry/protos:environment_cc_proto", + "//mediapipe/modules/face_geometry/protos:face_geometry_cc_proto", + "//mediapipe/modules/face_geometry/protos:mesh_3d_cc_proto", + "//mediapipe/util:resource_util", + "@com_google_absl//absl/types:optional", + ], + alwayslink = 1, +) + +mediapipe_proto_library( + name = "env_generator_calculator_proto", + srcs = ["env_generator_calculator.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/modules/face_geometry/protos:environment_proto", + ], +) + +cc_library( + name = 
"env_generator_calculator", + srcs = ["env_generator_calculator.cc"], + deps = [ + ":env_generator_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/port:status", + "//mediapipe/modules/face_geometry/libs:validation_utils", + "//mediapipe/modules/face_geometry/protos:environment_cc_proto", + ], + alwayslink = 1, +) + +mediapipe_proto_library( + name = "geometry_pipeline_calculator_proto", + srcs = ["geometry_pipeline_calculator.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + ], +) + +cc_library( + name = "geometry_pipeline_calculator", + srcs = ["geometry_pipeline_calculator.cc"], + deps = [ + ":geometry_pipeline_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/framework/port:statusor", + "//mediapipe/modules/face_geometry/libs:geometry_pipeline", + "//mediapipe/modules/face_geometry/libs:validation_utils", + "//mediapipe/modules/face_geometry/protos:environment_cc_proto", + "//mediapipe/modules/face_geometry/protos:face_geometry_cc_proto", + "//mediapipe/modules/face_geometry/protos:geometry_pipeline_metadata_cc_proto", + "//mediapipe/util:resource_util", + "@com_google_absl//absl/memory", + ], + alwayslink = 1, +) diff --git a/mediapipe/modules/face_geometry/README.md b/mediapipe/modules/face_geometry/README.md new file mode 100644 index 0000000..8427ea6 --- /dev/null +++ b/mediapipe/modules/face_geometry/README.md @@ -0,0 +1,20 @@ +# face_geometry + +Protos|Details +:--- | :--- +[`face_geometry.Environment`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/environment.proto)| Describes an environment; includes the camera frame origin point location as well as virtual camera parameters. +[`face_geometry.GeometryPipelineMetadata`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.proto)| Describes metadata needed to estimate face geometry based on the face landmark module result. +[`face_geometry.FaceGeometry`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/face_geometry.proto)| Describes geometry data for a single face; includes a face mesh surface and a face pose in a given environment. +[`face_geometry.Mesh3d`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/mesh_3d.proto)| Describes a 3D mesh surface. + +Calculators|Details +:--- | :--- +[`FaceGeometryEnvGeneratorCalculator`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/env_generator_calculator.cc)| Generates an environment that describes a virtual scene. +[`FaceGeometryPipelineCalculator`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc)| Extracts face geometry for multiple faces from a vector of landmark lists. +[`FaceGeometryEffectRendererCalculator`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/effect_renderer_calculator.cc)| Renders a face effect. + +Subgraphs|Details +:--- | :--- +[`FaceGeometryFromDetection`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_from_detection.pbtxt)| Extracts geometry from face detection for multiple faces. 
+[`FaceGeometryFromLandmarks`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_from_landmarks.pbtxt)| Extracts geometry from face landmarks for multiple faces.
+[`FaceGeometry`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry.pbtxt)| Extracts geometry from face landmarks for multiple faces. Deprecated; please use `FaceGeometryFromLandmarks` in new code.
diff --git a/mediapipe/modules/face_geometry/data/BUILD b/mediapipe/modules/face_geometry/data/BUILD
new file mode 100644
index 0000000..1661a22
--- /dev/null
+++ b/mediapipe/modules/face_geometry/data/BUILD
@@ -0,0 +1,59 @@
+# Copyright 2020 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//mediapipe/framework:encode_binary_proto.bzl", "encode_binary_proto")
+
+licenses(["notice"])
+
+package(default_visibility = ["//visibility:public"])
+
+encode_binary_proto(
+    name = "geometry_pipeline_metadata_detection",
+    input = "geometry_pipeline_metadata_detection.pbtxt",
+    message_type = "mediapipe.face_geometry.GeometryPipelineMetadata",
+    output = "geometry_pipeline_metadata_detection.binarypb",
+    deps = [
+        "//mediapipe/modules/face_geometry/protos:geometry_pipeline_metadata_proto",
+    ],
+)
+
+encode_binary_proto(
+    name = "geometry_pipeline_metadata_landmarks",
+    input = "geometry_pipeline_metadata_landmarks.pbtxt",
+    message_type = "mediapipe.face_geometry.GeometryPipelineMetadata",
+    output = "geometry_pipeline_metadata_landmarks.binarypb",
+    deps = [
+        "//mediapipe/modules/face_geometry/protos:geometry_pipeline_metadata_proto",
+    ],
+)
+
+# For backward-compatibility reasons, generate `geometry_pipeline_metadata.binarypb` from
+# the `geometry_pipeline_metadata_landmarks.pbtxt` definition.
+encode_binary_proto(
+    name = "geometry_pipeline_metadata",
+    input = "geometry_pipeline_metadata_landmarks.pbtxt",
+    message_type = "mediapipe.face_geometry.GeometryPipelineMetadata",
+    output = "geometry_pipeline_metadata.binarypb",
+    deps = [
+        "//mediapipe/modules/face_geometry/protos:geometry_pipeline_metadata_proto",
+    ],
+)
+
+# These canonical face model files are not meant to be used at runtime, but rather for asset
+# creation and/or reference.
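The `canonical_face_model.obj` exported here (its data follows) is a plain Wavefront OBJ: `v` records are 3D vertex positions, `vt` records are UV texture coordinates, and `f` records are triangles written as 1-based `vertex/texcoord` index pairs. Since these files are meant for asset work rather than runtime, a deliberately minimal Rust reader is enough to inspect them; this sketch handles only those three record types and is not a general OBJ loader:

```rust
#[derive(Default)]
struct FaceModel {
    positions: Vec<[f32; 3]>, // from "v x y z"
    texcoords: Vec<[f32; 2]>, // from "vt u v"
    triangles: Vec<[u32; 3]>, // position indices from "f a/at b/bt c/ct"
}

fn parse_obj(src: &str) -> FaceModel {
    let mut m = FaceModel::default();
    for line in src.lines() {
        let mut it = line.split_whitespace();
        match it.next() {
            Some("v") => {
                let v: Vec<f32> = it.take(3).filter_map(|t| t.parse().ok()).collect();
                if v.len() == 3 { m.positions.push([v[0], v[1], v[2]]); }
            }
            Some("vt") => {
                let v: Vec<f32> = it.take(2).filter_map(|t| t.parse().ok()).collect();
                if v.len() == 2 { m.texcoords.push([v[0], v[1]]); }
            }
            Some("f") => {
                // Keep only the position index before the '/'; OBJ is 1-based.
                let idx: Vec<u32> = it
                    .filter_map(|t| t.split('/').next()?.parse::<u32>().ok())
                    .map(|i| i - 1)
                    .collect();
                if idx.len() == 3 { m.triangles.push([idx[0], idx[1], idx[2]]); }
            }
            _ => {} // ignore comments and other record types
        }
    }
    m
}

fn main() {
    let m = parse_obj("v 0.0 -3.4 5.9\nvt 0.42 0.30\nf 1/1 1/1 1/1\n");
    assert_eq!(m.positions.len(), 1);
    assert_eq!(m.triangles[0], [0, 0, 0]);
}
```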
+exports_files([ + "canonical_face_model.fbx", + "canonical_face_model.obj", + "canonical_face_model_uv_visualization.png", +]) diff --git a/mediapipe/modules/face_geometry/data/canonical_face_model.fbx b/mediapipe/modules/face_geometry/data/canonical_face_model.fbx new file mode 100644 index 0000000..8e9d24a Binary files /dev/null and b/mediapipe/modules/face_geometry/data/canonical_face_model.fbx differ diff --git a/mediapipe/modules/face_geometry/data/canonical_face_model.obj b/mediapipe/modules/face_geometry/data/canonical_face_model.obj new file mode 100644 index 0000000..0e666d1 --- /dev/null +++ b/mediapipe/modules/face_geometry/data/canonical_face_model.obj @@ -0,0 +1,1834 @@ +v 0.000000 -3.406404 5.979507 +v 0.000000 -1.126865 7.475604 +v 0.000000 -2.089024 6.058267 +v -0.463928 0.955357 6.633583 +v 0.000000 -0.463170 7.586580 +v 0.000000 0.365669 7.242870 +v 0.000000 2.473255 5.788627 +v -4.253081 2.577646 3.279702 +v 0.000000 4.019042 5.284764 +v 0.000000 4.885979 5.385258 +v 0.000000 8.261778 4.481535 +v 0.000000 -3.706811 5.864924 +v 0.000000 -3.918301 5.569430 +v 0.000000 -3.994436 5.219482 +v 0.000000 -4.542400 5.404754 +v 0.000000 -4.745577 5.529457 +v 0.000000 -5.019567 5.601448 +v 0.000000 -5.365123 5.535441 +v 0.000000 -6.149624 5.071372 +v 0.000000 -1.501095 7.112196 +v -0.416106 -1.466449 6.447657 +v -7.087960 5.434801 0.099620 +v -2.628639 2.035898 3.848121 +v -3.198363 1.985815 3.796952 +v -3.775151 2.039402 3.646194 +v -4.465819 2.422950 3.155168 +v -2.164289 2.189867 3.851822 +v -3.208229 3.223926 4.115822 +v -2.673803 3.205337 4.092203 +v -3.745193 3.165286 3.972409 +v -4.161018 3.059069 3.719554 +v -5.062006 1.934418 2.776093 +v -2.266659 -7.425768 4.389812 +v -4.445859 2.663991 3.173422 +v -7.214530 2.263009 0.073150 +v -5.799793 2.349546 2.204059 +v -2.844939 -0.720868 4.433130 +v -0.711452 -3.329355 5.877044 +v -0.606033 -3.924562 5.444923 +v -1.431615 -3.500953 5.496189 +v -1.914910 -3.803146 5.028930 +v -1.131043 -3.973937 5.189648 +v -1.563548 -4.082763 4.842263 +v -2.650112 -5.003649 4.188483 +v -0.427049 -1.094134 7.360529 +v -0.496396 -0.475659 7.440358 +v -5.253307 3.881582 3.363159 +v -1.718698 0.974609 4.558359 +v -1.608635 -0.942516 5.814193 +v -1.651267 -0.610868 5.581319 +v -4.765501 -0.701554 3.534632 +v -0.478306 0.295766 7.101013 +v -3.734964 4.508230 4.550454 +v -4.588603 4.302037 4.048484 +v -6.279331 6.615427 1.425850 +v -1.220941 4.142165 5.106035 +v -2.193489 3.100317 4.000575 +v -3.102642 -4.352984 4.095905 +v -6.719682 -4.788645 -1.745401 +v -1.193824 -1.306795 5.737747 +v -0.729766 -1.593712 5.833208 +v -2.456206 -4.342621 4.283884 +v -2.204823 -4.304508 4.162499 +v -4.985894 4.802461 3.751977 +v -1.592294 -1.257709 5.456949 +v -2.644548 4.524654 4.921559 +v -2.760292 5.100971 5.015990 +v -3.523964 8.005976 3.729163 +v -5.599763 5.715470 2.724259 +v -3.063932 6.566144 4.529981 +v -5.720968 4.254584 2.830852 +v -6.374393 4.785590 1.591691 +v -0.672728 -3.688016 5.737804 +v -1.262560 -3.787691 5.417779 +v -1.732553 -3.952767 5.000579 +v -1.043625 -1.464973 5.662455 +v -2.321234 -4.329069 4.258156 +v -2.056846 -4.477671 4.520883 +v -2.153084 -4.276322 4.038093 +v -0.946874 -1.035249 6.512274 +v -1.469132 -4.036351 4.604908 +v -1.024340 -3.989851 4.926693 +v -0.533422 -3.993222 5.138202 +v -0.769720 -6.095394 4.985883 +v -0.699606 -5.291850 5.448304 +v -0.669687 -4.949770 5.509612 +v -0.630947 -4.695101 5.449371 +v -0.583218 -4.517982 5.339869 +v -1.537170 -4.423206 4.745470 +v -1.615600 -4.475942 4.813632 +v -1.729053 -4.618680 4.854463 +v 
-1.838624 -4.828746 4.823737 +v -2.368250 -3.106237 4.868096 +v -7.542244 -1.049282 -2.431321 +v 0.000000 -1.724003 6.601390 +v -1.826614 -4.399531 4.399021 +v -1.929558 -4.411831 4.497052 +v -0.597442 -2.013686 5.866456 +v -1.405627 -1.714196 5.241087 +v -0.662449 -1.819321 5.863759 +v -2.342340 0.572222 4.294303 +v -3.327324 0.104863 4.113860 +v -1.726175 -0.919165 5.273355 +v -5.133204 7.485602 2.660442 +v -4.538641 6.319907 3.683424 +v -3.986562 5.109487 4.466315 +v -2.169681 -5.440433 4.455874 +v -1.395634 5.011963 5.316032 +v -1.619500 6.599217 4.921106 +v -1.891399 8.236377 4.274997 +v -4.195832 2.235205 3.375099 +v -5.733342 1.411738 2.431726 +v -1.859887 2.355757 3.843181 +v -4.988612 3.074654 3.083858 +v -1.303263 1.416453 4.831091 +v -1.305757 -0.672779 6.415959 +v -6.465170 0.937119 1.689873 +v -5.258659 0.945811 2.974312 +v -4.432338 0.722096 3.522615 +v -3.300681 0.861641 3.872784 +v -2.430178 1.131492 4.039035 +v -1.820731 1.467954 4.224124 +v -0.563221 2.307693 5.566789 +v -6.338145 -0.529279 1.881175 +v -5.587698 3.208071 2.687839 +v -0.242624 -1.462857 7.071491 +v -1.611251 0.339326 4.895421 +v -7.743095 2.364999 -2.005167 +v -1.391142 1.851048 4.448999 +v -1.785794 -0.978284 4.850470 +v -4.670959 2.664461 3.084075 +v -1.333970 -0.283761 6.097047 +v -7.270895 -2.890917 -2.252455 +v -1.856432 2.585245 3.757904 +v -0.923388 0.073076 6.671944 +v -5.000589 -6.135128 1.892523 +v -5.085276 -7.178590 0.714711 +v -7.159291 -0.811820 -0.072044 +v -5.843051 -5.248023 0.924091 +v -6.847258 3.662916 0.724695 +v -2.412942 -8.258853 4.119213 +v -0.179909 -1.689864 6.573301 +v -2.103655 -0.163946 4.566119 +v -6.407571 2.236021 1.560843 +v -3.670075 2.360153 3.635230 +v -3.177186 2.294265 3.775704 +v -2.196121 -4.598322 4.479786 +v -6.234883 -1.944430 1.663542 +v -1.292924 -9.295920 4.094063 +v -3.210651 -8.533278 2.802001 +v -4.068926 -7.993109 1.925119 +v 0.000000 6.545390 5.027311 +v 0.000000 -9.403378 4.264492 +v -2.724032 2.315802 3.777151 +v -2.288460 2.398891 3.697603 +v -1.998311 2.496547 3.689148 +v -6.130040 3.399261 2.038516 +v -2.288460 2.886504 3.775031 +v -2.724032 2.961810 3.871767 +v -3.177186 2.964136 3.876973 +v -3.670075 2.927714 3.724325 +v -4.018389 2.857357 3.482983 +v -7.555811 4.106811 -0.991917 +v -4.018389 2.483695 3.440898 +v 0.000000 -2.521945 5.932265 +v -1.776217 -2.683946 5.213116 +v -1.222237 -1.182444 5.952465 +v -0.731493 -2.536683 5.815343 +v 0.000000 3.271027 5.236015 +v -4.135272 -6.996638 2.671970 +v -3.311811 -7.660815 3.382963 +v -1.313701 -8.639995 4.702456 +v -5.940524 -6.223629 -0.631468 +v -1.998311 2.743838 3.744030 +v -0.901447 1.236992 5.754256 +v 0.000000 -8.765243 4.891441 +v -2.308977 -8.974196 3.609070 +v -6.954154 -2.439843 -0.131163 +v -1.098819 -4.458788 5.120727 +v -1.181124 -4.579996 5.189564 +v -1.255818 -4.787901 5.237051 +v -1.325085 -5.106507 5.205010 +v -1.546388 -5.819392 4.757893 +v -1.953754 -4.183892 4.431713 +v -2.117802 -4.137093 4.555096 +v -2.285339 -4.051196 4.582438 +v -2.850160 -3.665720 4.484994 +v -5.278538 -2.238942 2.861224 +v -0.946709 1.907628 5.196779 +v -1.314173 3.104912 4.231404 +v -1.780000 2.860000 3.881555 +v -1.845110 -4.098880 4.247264 +v -5.436187 -4.030482 2.109852 +v -0.766444 3.182131 4.861453 +v -1.938616 -6.614410 4.521085 +v 0.000000 1.059413 6.774605 +v -0.516573 1.583572 6.148363 +v 0.000000 1.728369 6.316750 +v -1.246815 0.230297 5.681036 +v 0.000000 -7.942194 5.181173 +v 0.000000 -6.991499 5.153478 +v -0.997827 -6.930921 4.979576 +v -3.288807 -5.382514 3.795752 +v -2.311631 -1.566237 
4.590085 +v -2.680250 -6.111567 4.096152 +v -3.832928 -1.537326 4.137731 +v -2.961860 -2.274215 4.440943 +v -4.386901 -2.683286 3.643886 +v -1.217295 -7.834465 4.969286 +v -1.542374 -0.136843 5.201008 +v -3.878377 -6.041764 3.311079 +v -3.084037 -6.809842 3.814195 +v -3.747321 -4.503545 3.726453 +v -6.094129 -3.205991 1.473482 +v -4.588995 -4.728726 2.983221 +v -6.583231 -3.941269 0.070268 +v -3.492580 -3.195820 4.130198 +v -1.255543 0.802341 5.307551 +v -1.126122 -0.933602 6.538785 +v -1.443109 -1.142774 5.905127 +v -0.923043 -0.529042 7.003423 +v -1.755386 3.529117 4.327696 +v -2.632589 3.713828 4.364629 +v -3.388062 3.721976 4.309028 +v -4.075766 3.675413 4.076063 +v -4.622910 3.474691 3.646321 +v -5.171755 2.535753 2.670867 +v -7.297331 0.763172 -0.048769 +v -4.706828 1.651000 3.109532 +v -4.071712 1.476821 3.476944 +v -3.269817 1.470659 3.731945 +v -2.527572 1.617311 3.865444 +v -1.970894 1.858505 3.961782 +v -1.579543 2.097941 4.084996 +v -7.664182 0.673132 -2.435867 +v -1.397041 -1.340139 5.630378 +v -0.884838 0.658740 6.233232 +v -0.767097 -0.968035 7.077932 +v -0.460213 -1.334106 6.787447 +v -0.748618 -1.067994 6.798303 +v -1.236408 -1.585568 5.480490 +v -0.387306 -1.409990 6.957705 +v -0.319925 -1.607931 6.508676 +v -1.639633 2.556298 3.863736 +v -1.255645 2.467144 4.203800 +v -1.031362 2.382663 4.615849 +v -4.253081 2.772296 3.315305 +v -4.530000 2.910000 3.339685 +v 0.463928 0.955357 6.633583 +v 4.253081 2.577646 3.279702 +v 0.416106 -1.466449 6.447657 +v 7.087960 5.434801 0.099620 +v 2.628639 2.035898 3.848121 +v 3.198363 1.985815 3.796952 +v 3.775151 2.039402 3.646194 +v 4.465819 2.422950 3.155168 +v 2.164289 2.189867 3.851822 +v 3.208229 3.223926 4.115822 +v 2.673803 3.205337 4.092203 +v 3.745193 3.165286 3.972409 +v 4.161018 3.059069 3.719554 +v 5.062006 1.934418 2.776093 +v 2.266659 -7.425768 4.389812 +v 4.445859 2.663991 3.173422 +v 7.214530 2.263009 0.073150 +v 5.799793 2.349546 2.204059 +v 2.844939 -0.720868 4.433130 +v 0.711452 -3.329355 5.877044 +v 0.606033 -3.924562 5.444923 +v 1.431615 -3.500953 5.496189 +v 1.914910 -3.803146 5.028930 +v 1.131043 -3.973937 5.189648 +v 1.563548 -4.082763 4.842263 +v 2.650112 -5.003649 4.188483 +v 0.427049 -1.094134 7.360529 +v 0.496396 -0.475659 7.440358 +v 5.253307 3.881582 3.363159 +v 1.718698 0.974609 4.558359 +v 1.608635 -0.942516 5.814193 +v 1.651267 -0.610868 5.581319 +v 4.765501 -0.701554 3.534632 +v 0.478306 0.295766 7.101013 +v 3.734964 4.508230 4.550454 +v 4.588603 4.302037 4.048484 +v 6.279331 6.615427 1.425850 +v 1.220941 4.142165 5.106035 +v 2.193489 3.100317 4.000575 +v 3.102642 -4.352984 4.095905 +v 6.719682 -4.788645 -1.745401 +v 1.193824 -1.306795 5.737747 +v 0.729766 -1.593712 5.833208 +v 2.456206 -4.342621 4.283884 +v 2.204823 -4.304508 4.162499 +v 4.985894 4.802461 3.751977 +v 1.592294 -1.257709 5.456949 +v 2.644548 4.524654 4.921559 +v 2.760292 5.100971 5.015990 +v 3.523964 8.005976 3.729163 +v 5.599763 5.715470 2.724259 +v 3.063932 6.566144 4.529981 +v 5.720968 4.254584 2.830852 +v 6.374393 4.785590 1.591691 +v 0.672728 -3.688016 5.737804 +v 1.262560 -3.787691 5.417779 +v 1.732553 -3.952767 5.000579 +v 1.043625 -1.464973 5.662455 +v 2.321234 -4.329069 4.258156 +v 2.056846 -4.477671 4.520883 +v 2.153084 -4.276322 4.038093 +v 0.946874 -1.035249 6.512274 +v 1.469132 -4.036351 4.604908 +v 1.024340 -3.989851 4.926693 +v 0.533422 -3.993222 5.138202 +v 0.769720 -6.095394 4.985883 +v 0.699606 -5.291850 5.448304 +v 0.669687 -4.949770 5.509612 +v 0.630947 -4.695101 5.449371 +v 0.583218 -4.517982 5.339869 +v 1.537170 
-4.423206 4.745470 +v 1.615600 -4.475942 4.813632 +v 1.729053 -4.618680 4.854463 +v 1.838624 -4.828746 4.823737 +v 2.368250 -3.106237 4.868096 +v 7.542244 -1.049282 -2.431321 +v 1.826614 -4.399531 4.399021 +v 1.929558 -4.411831 4.497052 +v 0.597442 -2.013686 5.866456 +v 1.405627 -1.714196 5.241087 +v 0.662449 -1.819321 5.863759 +v 2.342340 0.572222 4.294303 +v 3.327324 0.104863 4.113860 +v 1.726175 -0.919165 5.273355 +v 5.133204 7.485602 2.660442 +v 4.538641 6.319907 3.683424 +v 3.986562 5.109487 4.466315 +v 2.169681 -5.440433 4.455874 +v 1.395634 5.011963 5.316032 +v 1.619500 6.599217 4.921106 +v 1.891399 8.236377 4.274997 +v 4.195832 2.235205 3.375099 +v 5.733342 1.411738 2.431726 +v 1.859887 2.355757 3.843181 +v 4.988612 3.074654 3.083858 +v 1.303263 1.416453 4.831091 +v 1.305757 -0.672779 6.415959 +v 6.465170 0.937119 1.689873 +v 5.258659 0.945811 2.974312 +v 4.432338 0.722096 3.522615 +v 3.300681 0.861641 3.872784 +v 2.430178 1.131492 4.039035 +v 1.820731 1.467954 4.224124 +v 0.563221 2.307693 5.566789 +v 6.338145 -0.529279 1.881175 +v 5.587698 3.208071 2.687839 +v 0.242624 -1.462857 7.071491 +v 1.611251 0.339326 4.895421 +v 7.743095 2.364999 -2.005167 +v 1.391142 1.851048 4.448999 +v 1.785794 -0.978284 4.850470 +v 4.670959 2.664461 3.084075 +v 1.333970 -0.283761 6.097047 +v 7.270895 -2.890917 -2.252455 +v 1.856432 2.585245 3.757904 +v 0.923388 0.073076 6.671944 +v 5.000589 -6.135128 1.892523 +v 5.085276 -7.178590 0.714711 +v 7.159291 -0.811820 -0.072044 +v 5.843051 -5.248023 0.924091 +v 6.847258 3.662916 0.724695 +v 2.412942 -8.258853 4.119213 +v 0.179909 -1.689864 6.573301 +v 2.103655 -0.163946 4.566119 +v 6.407571 2.236021 1.560843 +v 3.670075 2.360153 3.635230 +v 3.177186 2.294265 3.775704 +v 2.196121 -4.598322 4.479786 +v 6.234883 -1.944430 1.663542 +v 1.292924 -9.295920 4.094063 +v 3.210651 -8.533278 2.802001 +v 4.068926 -7.993109 1.925119 +v 2.724032 2.315802 3.777151 +v 2.288460 2.398891 3.697603 +v 1.998311 2.496547 3.689148 +v 6.130040 3.399261 2.038516 +v 2.288460 2.886504 3.775031 +v 2.724032 2.961810 3.871767 +v 3.177186 2.964136 3.876973 +v 3.670075 2.927714 3.724325 +v 4.018389 2.857357 3.482983 +v 7.555811 4.106811 -0.991917 +v 4.018389 2.483695 3.440898 +v 1.776217 -2.683946 5.213116 +v 1.222237 -1.182444 5.952465 +v 0.731493 -2.536683 5.815343 +v 4.135272 -6.996638 2.671970 +v 3.311811 -7.660815 3.382963 +v 1.313701 -8.639995 4.702456 +v 5.940524 -6.223629 -0.631468 +v 1.998311 2.743838 3.744030 +v 0.901447 1.236992 5.754256 +v 2.308977 -8.974196 3.609070 +v 6.954154 -2.439843 -0.131163 +v 1.098819 -4.458788 5.120727 +v 1.181124 -4.579996 5.189564 +v 1.255818 -4.787901 5.237051 +v 1.325085 -5.106507 5.205010 +v 1.546388 -5.819392 4.757893 +v 1.953754 -4.183892 4.431713 +v 2.117802 -4.137093 4.555096 +v 2.285339 -4.051196 4.582438 +v 2.850160 -3.665720 4.484994 +v 5.278538 -2.238942 2.861224 +v 0.946709 1.907628 5.196779 +v 1.314173 3.104912 4.231404 +v 1.780000 2.860000 3.881555 +v 1.845110 -4.098880 4.247264 +v 5.436187 -4.030482 2.109852 +v 0.766444 3.182131 4.861453 +v 1.938616 -6.614410 4.521085 +v 0.516573 1.583572 6.148363 +v 1.246815 0.230297 5.681036 +v 0.997827 -6.930921 4.979576 +v 3.288807 -5.382514 3.795752 +v 2.311631 -1.566237 4.590085 +v 2.680250 -6.111567 4.096152 +v 3.832928 -1.537326 4.137731 +v 2.961860 -2.274215 4.440943 +v 4.386901 -2.683286 3.643886 +v 1.217295 -7.834465 4.969286 +v 1.542374 -0.136843 5.201008 +v 3.878377 -6.041764 3.311079 +v 3.084037 -6.809842 3.814195 +v 3.747321 -4.503545 3.726453 +v 6.094129 -3.205991 1.473482 +v 4.588995 
-4.728726 2.983221 +v 6.583231 -3.941269 0.070268 +v 3.492580 -3.195820 4.130198 +v 1.255543 0.802341 5.307551 +v 1.126122 -0.933602 6.538785 +v 1.443109 -1.142774 5.905127 +v 0.923043 -0.529042 7.003423 +v 1.755386 3.529117 4.327696 +v 2.632589 3.713828 4.364629 +v 3.388062 3.721976 4.309028 +v 4.075766 3.675413 4.076063 +v 4.622910 3.474691 3.646321 +v 5.171755 2.535753 2.670867 +v 7.297331 0.763172 -0.048769 +v 4.706828 1.651000 3.109532 +v 4.071712 1.476821 3.476944 +v 3.269817 1.470659 3.731945 +v 2.527572 1.617311 3.865444 +v 1.970894 1.858505 3.961782 +v 1.579543 2.097941 4.084996 +v 7.664182 0.673132 -2.435867 +v 1.397041 -1.340139 5.630378 +v 0.884838 0.658740 6.233232 +v 0.767097 -0.968035 7.077932 +v 0.460213 -1.334106 6.787447 +v 0.748618 -1.067994 6.798303 +v 1.236408 -1.585568 5.480490 +v 0.387306 -1.409990 6.957705 +v 0.319925 -1.607931 6.508676 +v 1.639633 2.556298 3.863736 +v 1.255645 2.467144 4.203800 +v 1.031362 2.382663 4.615849 +v 4.253081 2.772296 3.315305 +v 4.530000 2.910000 3.339685 +vt 0.427942 0.304722 +vt 0.526878 0.295374 +vt 0.444832 0.269206 +vt 0.607600 0.322297 +vt 0.377046 0.677222 +vt 0.473033 0.304722 +vt 0.526913 0.282143 +vt 0.447112 0.284192 +vt 0.599262 0.318931 +vt 0.414712 0.664780 +vt 0.473122 0.295374 +vt 0.527671 0.263774 +vt 0.448020 0.295368 +vt 0.593203 0.314324 +vt 0.467288 0.470075 +vt 0.473087 0.282143 +vt 0.534090 0.220859 +vt 0.448662 0.304722 +vt 0.569944 0.232965 +vt 0.437114 0.441104 +vt 0.472329 0.263774 +vt 0.524613 0.307634 +vt 0.114210 0.384978 +vt 0.555168 0.269206 +vt 0.455528 0.451377 +vt 0.465828 0.220810 +vt 0.547818 0.307634 +vt 0.375437 0.075808 +vt 0.552888 0.284192 +vt 0.429884 0.533478 +vt 0.475387 0.307634 +vt 0.568842 0.307634 +vt 0.499877 0.091010 +vt 0.551980 0.295368 +vt 0.336768 0.355267 +vt 0.452182 0.307634 +vt 0.539958 0.442861 +vt 0.455607 0.548199 +vt 0.551338 0.304722 +vt 0.133823 0.317299 +vt 0.431158 0.307634 +vt 0.596371 0.306047 +vt 0.408772 0.626106 +vt 0.885770 0.384971 +vt 0.279777 0.285342 +vt 0.460042 0.442861 +vt 0.596961 0.293460 +vt 0.128294 0.208059 +vt 0.624563 0.075808 +vt 0.189096 0.353700 +vt 0.403629 0.306047 +vt 0.611897 0.306039 +vt 0.440512 0.097581 +vt 0.544341 0.548416 +vt 0.324548 0.296007 +vt 0.403039 0.293460 +vt 0.554692 0.419934 +vt 0.335279 0.147180 +vt 0.591234 0.626106 +vt 0.354128 0.187447 +vt 0.388103 0.306039 +vt 0.577238 0.326110 +vt 0.288719 0.180054 +vt 0.871706 0.208059 +vt 0.445308 0.419934 +vt 0.553172 0.331473 +vt 0.499923 0.648476 +vt 0.559100 0.097368 +vt 0.422762 0.326110 +vt 0.527121 0.333802 +vt 0.465844 0.379359 +vt 0.664630 0.147129 +vt 0.446828 0.331473 +vt 0.826722 0.721245 +vt 0.445682 0.433923 +vt 0.711218 0.180025 +vt 0.472879 0.333802 +vt 0.770391 0.700444 +vt 0.415838 0.375804 +vt 0.534154 0.379360 +vt 0.173287 0.721252 +vt 0.635536 0.810751 +vt 0.499988 0.381566 +vt 0.554318 0.433923 +vt 0.229622 0.700459 +vt 0.770092 0.767979 +vt 0.301415 0.612551 +vt 0.584177 0.375893 +vt 0.364501 0.810886 +vt 0.668509 0.880086 +vt 0.058133 0.680924 +vt 0.698585 0.612551 +vt 0.229924 0.767997 +vt 0.616907 0.744114 +vt 0.301415 0.636844 +vt 0.941867 0.680924 +vt 0.331431 0.880286 +vt 0.614083 0.718613 +vt 0.318785 0.641660 +vt 0.698585 0.636844 +vt 0.383103 0.744160 +vt 0.577414 0.436833 +vt 0.343364 0.644643 +vt 0.681215 0.641660 +vt 0.385919 0.718636 +vt 0.722943 0.728037 +vt 0.365962 0.644029 +vt 0.656636 0.644643 +vt 0.422552 0.436767 +vt 0.607591 0.305797 +vt 0.388665 0.637716 +vt 0.634038 0.644029 +vt 0.277076 0.728068 +vt 0.618026 0.305289 +vt 0.194993 0.657898 
+vt 0.611335 0.637716 +vt 0.392389 0.305797 +vt 0.542902 0.415208 +vt 0.410373 0.608920 +vt 0.805016 0.657892 +vt 0.381974 0.305289 +vt 0.557261 0.427174 +vt 0.393207 0.604463 +vt 0.589660 0.608938 +vt 0.457098 0.415208 +vt 0.932695 0.269895 +vt 0.366170 0.601178 +vt 0.606793 0.604463 +vt 0.442739 0.427174 +vt 0.645429 0.303293 +vt 0.499977 0.045547 +vt 0.633830 0.601178 +vt 0.067305 0.269895 +vt 0.607610 0.646112 +vt 0.500023 0.809424 +vt 0.733752 0.130299 +vt 0.354490 0.303216 +vt 0.552386 0.697432 +vt 0.266248 0.130299 +vt 0.681008 0.101715 +vt 0.392390 0.646112 +vt 0.830705 0.806186 +vt 0.318993 0.101715 +vt 0.568013 0.055435 +vt 0.447580 0.697390 +vt 0.703624 0.706729 +vt 0.430987 0.055935 +vt 0.812086 0.411461 +vt 0.169295 0.806186 +vt 0.662801 0.717082 +vt 0.187885 0.411462 +vt 0.603900 0.289783 +vt 0.296392 0.706757 +vt 0.516446 0.500361 +vt 0.396100 0.289783 +vt 0.656636 0.599403 +vt 0.337212 0.717117 +vt 0.723330 0.636627 +vt 0.723087 0.467946 +vt 0.343364 0.599403 +vt 0.681215 0.603765 +vt 0.483370 0.500413 +vt 0.710288 0.631747 +vt 0.578632 0.466377 +vt 0.318785 0.603765 +vt 0.825608 0.602325 +vt 0.276896 0.467943 +vt 0.549756 0.600249 +vt 0.570338 0.451425 +vt 0.174399 0.602329 +vt 0.617942 0.491684 +vt 0.421352 0.466259 +vt 0.560698 0.604668 +vt 0.598631 0.545021 +vt 0.382385 0.491427 +vt 0.508953 0.420562 +vt 0.429819 0.451385 +vt 0.573595 0.610193 +vt 0.742247 0.685493 +vt 0.490967 0.420622 +vt 0.614074 0.116754 +vt 0.401223 0.544828 +vt 0.517472 0.422123 +vt 0.515097 0.472748 +vt 0.385764 0.116846 +vt 0.865595 0.666313 +vt 0.257765 0.685510 +vt 0.516311 0.436946 +vt 0.513050 0.452718 +vt 0.134410 0.666317 +vt 0.816351 0.259740 +vt 0.485301 0.472605 +vt 0.566036 0.417671 +vt 0.624852 0.271901 +vt 0.183610 0.259743 +vt 0.892441 0.459239 +vt 0.486717 0.452371 +vt 0.531529 0.444943 +vt 0.571228 0.317308 +vt 0.107550 0.459245 +vt 0.801779 0.168062 +vt 0.374971 0.272195 +vt 0.523913 0.436170 +vt 0.549626 0.319139 +vt 0.198221 0.168062 +vt 0.760966 0.220247 +vt 0.428771 0.317309 +vt 0.526564 0.453882 +vt 0.585384 0.333459 +vt 0.238979 0.220255 +vt 0.537728 0.494615 +vt 0.450374 0.319139 +vt 0.541366 0.521101 +vt 0.560215 0.342771 +vt 0.462783 0.494253 +vt 0.580985 0.612840 +vt 0.414617 0.333459 +vt 0.567192 0.430580 +vt 0.525850 0.319809 +vt 0.419054 0.612845 +vt 0.967686 0.355643 +vt 0.439785 0.342771 +vt 0.992440 0.519223 +vt 0.528249 0.349596 +vt 0.032314 0.355643 +vt 0.560611 0.480983 +vt 0.474155 0.319808 +vt 0.579658 0.590055 +vt 0.643998 0.465512 +vt 0.439121 0.481042 +vt 0.733530 0.623023 +vt 0.471751 0.349596 +vt 0.603876 0.583413 +vt 0.790082 0.608646 +vt 0.266470 0.623023 +vt 0.602995 0.451312 +vt 0.355808 0.465594 +vt 0.633505 0.573912 +vt 0.893693 0.600040 +vt 0.396993 0.451203 +vt 0.573500 0.580000 +vt 0.209925 0.608647 +vt 0.666525 0.566134 +vt 0.719902 0.624400 +vt 0.426243 0.579569 +vt 0.980531 0.598436 +vt 0.106310 0.600044 +vt 0.702114 0.566837 +vt 0.602918 0.157137 +vt 0.019469 0.598436 +vt 0.595293 0.514976 +vt 0.280098 0.624400 +vt 0.732392 0.575453 +vt 0.752212 0.589195 +vt 0.404670 0.514867 +vt 0.509127 0.437282 +vt 0.396889 0.157245 +vt 0.897013 0.531231 +vt 0.702097 0.646409 +vt 0.490726 0.437599 +vt 0.771046 0.651041 +vt 0.247792 0.589190 +vt 0.758757 0.617213 +vt 0.680678 0.652735 +vt 0.228962 0.651049 +vt 0.810748 0.476074 +vt 0.297903 0.646409 +vt 0.716482 0.666799 +vt 0.629906 0.653924 +vt 0.189241 0.476076 +vt 0.523481 0.594373 +vt 0.319322 0.652735 +vt 0.687132 0.677654 +vt 0.654766 0.655989 +vt 0.476410 0.594194 +vt 0.600862 0.567527 +vt 0.370094 
0.653924 +vt 0.655896 0.679837 +vt 0.606630 0.596295 +vt 0.398964 0.567345 +vt 0.631101 0.552846 +vt 0.345234 0.655989 +vt 0.622953 0.677221 +vt 0.725342 0.610869 +vt 0.368756 0.552793 +vt 0.667113 0.539327 +vt 0.393362 0.596294 +vt 0.585271 0.664823 +vt 0.688880 0.590540 +vt 0.332828 0.539288 +vt 0.713757 0.532373 +vt 0.274658 0.610869 +vt 0.531987 0.469860 +vt 0.661242 0.586975 +vt 0.286267 0.532325 +vt 0.752702 0.542818 +vt 0.311120 0.590540 +vt 0.562759 0.441215 +vt 0.634070 0.590424 +vt 0.247308 0.542806 +vt 0.821442 0.542444 +vt 0.313951 0.224692 +vt 0.338758 0.586975 +vt 0.544562 0.451624 +vt 0.895093 0.745859 +vt 0.178560 0.542446 +vt 0.551868 0.463430 +vt 0.410986 0.491277 +vt 0.365930 0.590424 +vt 0.570082 0.533674 +vt 0.526227 0.426090 +vt 0.448340 0.463064 +vt 0.572156 0.562348 +vt 0.447750 0.137523 +vt 0.104907 0.745859 +vt 0.663187 0.355403 +vt 0.710288 0.619236 +vt 0.427685 0.562039 +vt 0.742870 0.644554 +vt 0.295284 0.378419 +vt 0.473773 0.426090 +vt 0.866152 0.317295 +vt 0.517862 0.528052 +vt 0.257135 0.644560 +vt 0.587247 0.601068 +vt 0.357155 0.395730 +vt 0.499816 0.437019 +vt 0.720122 0.285333 +vt 0.276670 0.636627 +vt 0.412782 0.601030 +vt 0.781070 0.564595 +vt 0.319688 0.429262 +vt 0.499968 0.218629 +vt 0.810858 0.353695 +vt 0.289712 0.631747 +vt 0.218937 0.564589 +vt 0.711045 0.601048 +vt 0.374293 0.219815 +vt 0.499977 0.262981 +vt 0.675343 0.296022 +vt 0.450067 0.599566 +vt 0.288955 0.601048 +vt 0.588166 0.890956 +vt 0.378909 0.425990 +vt 0.499977 0.280615 +vt 0.645735 0.187360 +vt 0.438999 0.603505 +vt 0.412198 0.891099 +vt 0.570304 0.812129 +vt 0.344549 0.254561 +vt 0.499977 0.294066 +vt 0.685945 0.224643 +vt 0.426450 0.610201 +vt 0.429765 0.812166 +vt 0.558266 0.738328 +vt 0.456549 0.180799 +vt 0.499977 0.304722 +vt 0.589072 0.491363 +vt 0.482483 0.422151 +vt 0.441728 0.738324 +vt 0.600409 0.250995 +vt 0.499913 0.178271 +vt 0.500023 0.307652 +vt 0.552012 0.137408 +vt 0.483518 0.437016 +vt 0.399510 0.251079 +vt 0.672684 0.743419 +vt 0.499886 0.133083 +vt 0.500016 0.320776 +vt 0.704663 0.378470 +vt 0.433991 0.417638 +vt 0.327338 0.743473 +vt 0.709250 0.798492 +vt 0.432112 0.506411 +vt 0.500023 0.333766 +vt 0.642764 0.395662 +vt 0.468472 0.444943 +vt 0.290777 0.798554 +vt 0.757824 0.852324 +vt 0.499974 0.560363 +vt 0.500023 0.892950 +vt 0.680198 0.429281 +vt 0.476088 0.436170 +vt 0.242176 0.852324 +vt 0.588354 0.453138 +vt 0.479154 0.557346 +vt 0.499987 0.730081 +vt 0.625560 0.219688 +vt 0.473466 0.454256 +vt 0.411671 0.453035 +vt 0.665586 0.504049 +vt 0.499989 0.530175 +vt 0.499955 0.687602 +vt 0.621009 0.425982 +vt 0.458639 0.520911 +vt 0.334562 0.503927 +vt 0.627543 0.526648 +vt 0.411362 0.195673 +vt 0.289712 0.619236 +vt 0.655317 0.254485 +vt 0.432949 0.430482 +vt 0.372120 0.526586 +vt 0.536915 0.406214 +vt 0.468268 0.647329 +vt 0.499523 0.598938 +vt 0.543283 0.180745 +vt 0.007561 0.519223 +vt 0.463080 0.406216 +vt 0.577268 0.414065 +vt 0.228018 0.316428 +vt 0.499910 0.501747 +vt 0.567985 0.506521 +vt 0.420121 0.589772 +vt 0.422729 0.414015 +vt 0.531915 0.398463 +vt 0.413386 0.307634 +vt 0.500151 0.472844 +vt 0.520797 0.557435 +vt 0.396012 0.583304 +vt 0.468080 0.398465 +vt 0.590372 0.298177 +vt 0.416164 0.631286 +vt 0.482113 0.528021 +vt 0.588371 0.195559 +vt 0.366427 0.573884 +vt 0.409626 0.298177 +vt 0.586800 0.304600 +vt 0.436392 0.640113 +vt 0.499974 0.397628 +vt 0.531597 0.647517 +vt 0.333434 0.566122 +vt 0.413200 0.304600 +vt 0.986046 0.439966 +vt 0.452770 0.579150 +vt 0.500026 0.452513 +vt 0.771915 0.316422 +vt 0.297879 0.566824 +vt 0.499914 0.419853 +vt 
0.609945 0.360090 +vt 0.247923 0.398667 +vt 0.499977 0.347466 +vt 0.586614 0.307634 +vt 0.267612 0.575440 +vt 0.013954 0.439966 +vt 0.581691 0.279937 +vt 0.367856 0.336081 +vt 0.583841 0.631286 +vt 0.102986 0.531237 +vt 0.390095 0.360427 +vt 0.576838 0.288154 +vt 0.392400 0.322297 +vt 0.563544 0.640172 +vt 0.241246 0.617214 +vt 0.418309 0.279937 +vt 0.573521 0.296460 +vt 0.400738 0.318931 +vt 0.547226 0.579605 +vt 0.283526 0.666810 +vt 0.423162 0.288154 +vt 0.572058 0.304722 +vt 0.406787 0.314327 +vt 0.752033 0.398685 +vt 0.312876 0.677668 +vt 0.426479 0.296460 +vt 0.526967 0.304722 +vt 0.430012 0.233191 +vt 0.631938 0.336500 +vt 0.344108 0.679849 +f 174/43 156/119 134/220 +f 247/335 34/252 8/399 +f 383/124 399/59 363/216 +f 264/244 467/163 250/317 +f 309/42 416/442 325/427 +f 79/51 96/432 192/416 +f 357/246 390/96 265/239 +f 128/250 35/247 163/91 +f 369/186 265/239 390/96 +f 140/190 163/91 35/247 +f 268/224 1/441 303/70 +f 38/232 73/77 1/441 +f 12/375 303/70 1/441 +f 12/375 1/441 73/77 +f 350/281 452/238 351/276 +f 121/285 122/280 232/425 +f 453/233 351/276 452/238 +f 233/419 232/425 122/280 +f 268/224 303/70 270/214 +f 38/232 40/222 73/77 +f 304/66 270/214 303/70 +f 74/73 73/77 40/222 +f 358/241 344/313 351/276 +f 129/245 122/280 115/318 +f 278/174 351/276 344/313 +f 48/182 115/318 122/280 +f 351/276 453/233 358/241 +f 122/280 129/245 233/419 +f 454/228 358/241 453/233 +f 234/413 233/419 129/245 +f 300/82 334/373 298/90 +f 70/89 68/97 105/378 +f 333/379 298/90 334/373 +f 104/384 105/378 68/97 +f 176/33 153/131 397/68 +f 176/33 172/53 153/131 +f 378/144 397/68 153/131 +f 149/147 153/131 172/53 +f 382/128 385/116 383/124 +f 155/123 156/119 158/111 +f 399/59 383/124 385/116 +f 174/43 158/111 156/119 +f 281/159 348/291 331/391 +f 51/167 102/396 119/295 +f 349/286 331/391 348/291 +f 120/290 119/295 102/396 +f 270/214 304/66 271/209 +f 40/222 41/217 74/73 +f 305/62 271/209 304/66 +f 75/69 74/73 41/217 +f 10/387 337/355 152/135 +f 10/387 152/135 108/360 +f 338/349 152/135 337/355 +f 109/354 108/360 152/135 +f 345/307 279/169 361/226 +f 116/312 132/230 49/177 +f 280/164 361/226 279/169 +f 50/172 49/177 132/230 +f 263/249 432/346 419/424 +f 33/257 195/398 212/60 +f 425/388 419/424 432/346 +f 205/338 212/60 195/398 +f 305/62 409/9 271/209 +f 75/69 41/217 185/456 +f 410/4 271/209 409/9 +f 186/451 185/456 41/217 +f 273/199 311/32 408/14 +f 43/207 184/461 81/41 +f 416/442 408/14 311/32 +f 192/416 81/41 184/461 +f 323/439 271/209 411/467 +f 93/449 187/446 41/217 +f 410/4 411/467 271/209 +f 186/451 41/217 187/446 +f 348/291 450/248 349/286 +f 119/295 120/290 230/437 +f 451/243 349/286 450/248 +f 231/431 230/437 120/290 +f 435/328 433/340 431/352 +f 215/45 211/302 213/55 +f 423/400 431/352 433/340 +f 203/350 213/55 211/302 +f 314/17 315/12 19/333 +f 84/26 19/333 85/21 +f 18/339 19/333 315/12 +f 18/339 85/21 19/333 +f 308/47 376/152 307/52 +f 78/56 77/61 147/155 +f 292/114 307/52 376/152 +f 62/121 147/155 77/61 +f 260/264 388/104 261/259 +f 30/272 31/267 161/99 +f 389/100 261/259 388/104 +f 162/95 161/99 31/267 +f 287/134 415/447 385/116 +f 57/141 158/111 191/422 +f 399/59 385/116 415/447 +f 174/43 191/422 158/111 +f 419/424 425/388 407/19 +f 195/398 183/466 205/338 +f 336/361 407/19 425/388 +f 107/366 205/338 183/466 +f 368/191 417/436 365/206 +f 139/195 136/210 193/410 +f 435/328 365/206 417/436 +f 215/45 193/410 136/210 +f 392/88 424/394 328/409 +f 166/79 99/414 204/344 +f 359/236 328/409 424/394 +f 130/240 204/344 99/414 +f 299/86 302/74 285/142 +f 69/93 55/149 72/81 +f 252/305 285/142 302/74 +f 
22/315 72/81 55/149 +f 5/417 276/184 6/411 +f 5/417 6/411 46/192 +f 282/154 6/411 276/184 +f 52/162 46/192 6/411 +f 255/289 374/161 254/294 +f 25/297 24/303 145/165 +f 375/156 254/294 374/161 +f 146/160 145/165 24/303 +f 321/450 322/445 308/47 +f 91/459 78/56 92/454 +f 376/152 308/47 322/445 +f 147/155 92/454 78/56 +f 281/159 426/382 412/462 +f 51/167 188/440 206/332 +f 428/370 412/462 426/382 +f 208/320 206/332 188/440 +f 422/406 314/17 201/362 +f 202/356 201/362 84/26 +f 19/333 201/362 314/17 +f 19/333 84/26 201/362 +f 336/361 322/445 407/19 +f 107/366 183/466 92/454 +f 406/24 407/19 322/445 +f 182/3 92/454 183/466 +f 406/24 322/445 405/29 +f 182/3 181/8 92/454 +f 321/450 405/29 322/445 +f 91/459 92/454 181/8 +f 18/339 315/12 17/345 +f 18/339 17/345 85/21 +f 316/7 17/345 315/12 +f 86/16 85/21 17/345 +f 426/382 267/229 427/376 +f 206/332 207/326 37/237 +f 424/394 427/376 267/229 +f 204/344 37/237 207/326 +f 370/181 397/68 401/49 +f 141/185 177/28 172/53 +f 378/144 401/49 397/68 +f 149/147 172/53 177/28 +f 392/88 270/214 323/439 +f 166/79 93/449 40/222 +f 271/209 323/439 270/214 +f 41/217 40/222 93/449 +f 418/430 466/168 414/452 +f 194/404 190/428 246/341 +f 465/173 414/452 466/168 +f 245/347 246/341 190/428 +f 258/274 259/269 387/108 +f 28/282 160/103 29/277 +f 386/112 387/108 259/269 +f 159/107 29/277 160/103 +f 261/259 389/100 468/158 +f 31/267 248/329 162/95 +f 467/163 468/158 389/100 +f 247/335 162/95 248/329 +f 249/323 457/213 420/418 +f 4/423 197/386 237/395 +f 400/54 420/418 457/213 +f 175/38 237/395 197/386 +f 334/373 299/86 333/379 +f 105/378 104/384 69/93 +f 285/142 333/379 299/86 +f 55/149 69/93 104/384 +f 286/138 9/393 418/430 +f 56/145 194/404 9/393 +f 169/67 418/430 9/393 +f 169/67 9/393 194/404 +f 341/331 262/254 347/296 +f 112/336 118/300 32/262 +f 449/253 347/296 262/254 +f 229/443 32/262 118/300 +f 286/138 418/430 442/288 +f 56/145 222/10 194/404 +f 414/452 442/288 418/430 +f 190/428 194/404 222/10 +f 328/409 461/193 327/415 +f 99/414 98/420 241/371 +f 329/403 327/415 461/193 +f 100/408 241/371 98/420 +f 278/174 356/251 330/397 +f 48/182 101/402 127/255 +f 372/171 330/397 356/251 +f 143/175 127/255 101/402 +f 310/37 393/84 439/304 +f 80/46 219/25 167/75 +f 440/298 439/304 393/84 +f 220/20 167/75 219/25 +f 382/128 383/124 257/279 +f 155/123 27/287 156/119 +f 342/325 257/279 383/124 +f 113/330 156/119 27/287 +f 361/226 280/164 421/412 +f 132/230 199/374 50/172 +f 430/358 421/412 280/164 +f 210/308 50/172 199/374 +f 366/201 365/206 380/136 +f 137/205 151/139 136/210 +f 395/76 380/136 365/206 +f 170/63 136/210 151/139 +f 356/251 278/174 438/310 +f 127/255 218/30 48/182 +f 344/313 438/310 278/174 +f 115/318 48/182 218/30 +f 444/278 445/273 283/150 +f 224/468 53/157 225/463 +f 284/146 283/150 445/273 +f 54/153 225/463 53/157 +f 282/154 276/184 364/211 +f 52/162 135/215 46/192 +f 441/293 364/211 276/184 +f 221/15 46/192 135/215 +f 432/346 263/249 396/72 +f 212/60 171/58 33/257 +f 370/181 396/72 263/249 +f 141/185 33/257 171/58 +f 338/349 300/82 339/343 +f 109/354 110/348 70/89 +f 298/90 339/343 300/82 +f 68/97 70/89 110/348 +f 336/361 274/194 322/445 +f 107/366 92/454 44/202 +f 376/152 322/445 274/194 +f 147/155 44/202 92/454 +f 349/286 451/243 350/281 +f 120/290 121/285 231/431 +f 452/238 350/281 451/243 +f 232/425 231/431 121/285 +f 468/158 360/231 343/319 +f 248/329 114/324 131/235 +f 447/263 343/319 360/231 +f 227/453 131/235 114/324 +f 283/150 284/146 335/367 +f 53/157 106/372 54/153 +f 294/106 335/367 284/146 +f 64/113 54/153 106/372 +f 251/311 459/203 463/183 +f 21/321 
243/359 239/383 +f 462/188 463/183 459/203 +f 242/365 239/383 243/359 +f 277/179 354/261 301/78 +f 47/187 71/85 125/265 +f 384/120 301/78 354/261 +f 157/115 125/265 71/85 +f 326/421 293/110 325/427 +f 97/426 96/432 63/117 +f 309/42 325/427 293/110 +f 79/51 63/117 96/432 +f 284/146 277/179 294/106 +f 54/153 64/113 47/187 +f 301/78 294/106 277/179 +f 71/85 47/187 64/113 +f 448/258 265/239 346/301 +f 228/448 117/306 35/247 +f 373/166 346/301 265/239 +f 144/170 35/247 117/306 +f 353/266 346/301 347/296 +f 124/270 118/300 117/306 +f 341/331 347/296 346/301 +f 112/336 117/306 118/300 +f 2/435 20/327 275/189 +f 2/435 45/197 20/327 +f 355/256 275/189 20/327 +f 126/260 20/327 45/197 +f 249/323 282/154 457/213 +f 4/423 237/395 52/162 +f 364/211 457/213 282/154 +f 135/215 52/162 237/395 +f 426/382 427/376 428/370 +f 206/332 208/320 207/326 +f 437/316 428/370 427/376 +f 217/35 207/326 208/320 +f 381/132 382/128 253/299 +f 154/127 23/309 155/123 +f 257/279 253/299 382/128 +f 27/287 155/123 23/309 +f 392/88 394/80 270/214 +f 166/79 40/222 168/71 +f 268/224 270/214 394/80 +f 38/232 168/71 40/222 +f 200/368 429/364 201/362 +f 200/368 201/362 209/314 +f 422/406 201/362 429/364 +f 202/356 209/314 201/362 +f 331/391 330/397 267/229 +f 102/396 37/237 101/402 +f 372/171 267/229 330/397 +f 143/175 101/402 37/237 +f 423/400 433/340 274/194 +f 203/350 44/202 213/55 +f 288/130 274/194 433/340 +f 58/137 213/55 44/202 +f 291/118 251/311 329/403 +f 61/125 100/408 21/321 +f 463/183 329/403 251/311 +f 243/359 21/321 100/408 +f 259/269 287/134 386/112 +f 29/277 159/107 57/141 +f 385/116 386/112 287/134 +f 158/111 57/141 159/107 +f 343/319 447/263 354/261 +f 114/324 125/265 227/453 +f 266/234 354/261 447/263 +f 36/242 227/453 125/265 +f 258/274 387/108 260/264 +f 28/282 30/272 160/103 +f 388/104 260/264 387/108 +f 161/99 160/103 30/272 +f 431/352 423/400 432/346 +f 211/302 212/60 203/350 +f 425/388 432/346 423/400 +f 205/338 203/350 212/60 +f 446/268 343/319 277/179 +f 226/458 47/187 114/324 +f 354/261 277/179 343/319 +f 125/265 114/324 47/187 +f 425/388 423/400 336/361 +f 205/338 107/366 203/350 +f 274/194 336/361 423/400 +f 44/202 203/350 107/366 +f 307/52 293/110 308/47 +f 77/61 78/56 63/117 +f 326/421 308/47 293/110 +f 97/426 63/117 78/56 +f 367/196 448/258 353/266 +f 138/200 124/270 228/448 +f 346/301 353/266 448/258 +f 117/306 228/448 124/270 +f 303/70 269/219 304/66 +f 73/77 74/73 39/227 +f 272/204 304/66 269/219 +f 42/212 39/227 74/73 +f 372/171 359/236 267/229 +f 143/175 37/237 130/240 +f 424/394 267/229 359/236 +f 204/344 130/240 37/237 +f 328/409 295/102 461/193 +f 99/414 241/371 65/109 +f 456/218 461/193 295/102 +f 236/401 65/109 241/371 +f 295/102 332/385 279/169 +f 65/109 49/177 103/390 +f 280/164 279/169 332/385 +f 50/172 103/390 49/177 +f 304/66 272/204 305/62 +f 74/73 75/69 42/212 +f 273/199 305/62 272/204 +f 43/207 42/212 75/69 +f 428/370 437/316 435/328 +f 208/320 215/45 217/35 +f 433/340 435/328 437/316 +f 213/55 217/35 215/45 +f 305/62 273/199 409/9 +f 75/69 185/456 43/207 +f 408/14 409/9 273/199 +f 184/461 43/207 185/456 +f 395/76 431/352 396/72 +f 170/63 171/58 211/302 +f 432/346 396/72 431/352 +f 212/60 211/302 171/58 +f 396/72 370/181 379/140 +f 171/58 150/143 141/185 +f 401/49 379/140 370/181 +f 177/28 141/185 150/143 +f 297/94 335/367 300/82 +f 67/101 70/89 106/372 +f 334/373 300/82 335/367 +f 105/378 106/372 70/89 +f 418/430 169/67 352/271 +f 194/404 123/275 169/67 +f 7/405 352/271 169/67 +f 7/405 169/67 123/275 +f 281/159 412/462 353/266 +f 51/167 124/270 188/440 +f 377/148 353/266 412/462 +f 
148/151 188/440 124/270 +f 320/455 321/450 326/421 +f 90/464 97/426 91/459 +f 308/47 326/421 321/450 +f 78/56 91/459 97/426 +f 286/138 296/98 337/355 +f 56/145 108/360 66/105 +f 297/94 337/355 296/98 +f 67/101 66/105 108/360 +f 405/29 321/450 404/34 +f 181/8 180/13 91/459 +f 320/455 404/34 321/450 +f 90/464 91/459 180/13 +f 331/391 349/286 330/397 +f 102/396 101/402 120/290 +f 350/281 330/397 349/286 +f 121/285 120/290 101/402 +f 335/367 294/106 334/373 +f 106/372 105/378 64/113 +f 299/86 334/373 294/106 +f 69/93 64/113 105/378 +f 324/433 455/223 367/196 +f 94/444 138/200 235/407 +f 448/258 367/196 455/223 +f 228/448 235/407 138/200 +f 17/345 316/7 16/351 +f 17/345 16/351 86/16 +f 317/2 16/351 316/7 +f 87/11 86/16 16/351 +f 430/358 280/164 359/236 +f 210/308 130/240 50/172 +f 332/385 359/236 280/164 +f 103/390 50/172 130/240 +f 16/351 317/2 15/357 +f 16/351 15/357 87/11 +f 318/465 15/357 317/2 +f 88/6 87/11 15/357 +f 9/393 286/138 10/387 +f 9/393 10/387 56/145 +f 337/355 10/387 286/138 +f 108/360 56/145 10/387 +f 330/397 350/281 278/174 +f 101/402 48/182 121/285 +f 351/276 278/174 350/281 +f 122/280 121/285 48/182 +f 253/299 254/294 381/132 +f 23/309 154/127 24/303 +f 375/156 381/132 254/294 +f 146/160 24/303 154/127 +f 403/39 404/34 319/460 +f 179/18 89/1 180/13 +f 320/455 319/460 404/34 +f 90/464 180/13 89/1 +f 352/271 7/405 420/418 +f 123/275 197/386 7/405 +f 198/380 420/418 7/405 +f 198/380 7/405 197/386 +f 325/427 319/460 326/421 +f 96/432 97/426 89/1 +f 320/455 326/421 319/460 +f 90/464 89/1 97/426 +f 398/64 368/191 366/201 +f 173/48 137/205 139/195 +f 365/206 366/201 368/191 +f 136/210 139/195 137/205 +f 289/126 436/322 398/64 +f 59/133 173/48 216/40 +f 368/191 398/64 436/322 +f 139/195 216/40 173/48 +f 439/304 440/298 345/307 +f 219/25 116/312 220/20 +f 279/169 345/307 440/298 +f 49/177 220/20 116/312 +f 272/204 312/27 273/199 +f 42/212 43/207 82/36 +f 311/32 273/199 312/27 +f 81/41 82/36 43/207 +f 6/411 282/154 196/392 +f 6/411 196/392 52/162 +f 249/323 196/392 282/154 +f 4/423 52/162 196/392 +f 274/194 288/130 376/152 +f 44/202 147/155 58/137 +f 292/114 376/152 288/130 +f 62/121 58/137 147/155 +f 397/68 429/364 176/33 +f 172/53 176/33 209/314 +f 200/368 176/33 429/364 +f 200/368 209/314 176/33 +f 269/219 313/22 272/204 +f 39/227 42/212 83/31 +f 312/27 272/204 313/22 +f 82/36 83/31 42/212 +f 445/273 446/268 284/146 +f 225/463 54/153 226/458 +f 277/179 284/146 446/268 +f 47/187 226/458 54/153 +f 255/289 340/337 374/161 +f 25/297 145/165 111/342 +f 391/92 374/161 340/337 +f 164/87 111/342 145/165 +f 296/98 283/150 297/94 +f 66/105 67/101 53/157 +f 335/367 297/94 283/150 +f 106/372 53/157 67/101 +f 347/296 449/253 348/291 +f 118/300 119/295 229/443 +f 450/248 348/291 449/253 +f 230/437 229/443 119/295 +f 455/223 357/246 448/258 +f 235/407 228/448 128/250 +f 265/239 448/258 357/246 +f 35/247 128/250 228/448 +f 337/355 297/94 338/349 +f 108/360 109/354 67/101 +f 300/82 338/349 297/94 +f 70/89 67/101 109/354 +f 152/135 338/349 11/381 +f 152/135 11/381 109/354 +f 339/343 11/381 338/349 +f 110/348 109/354 11/381 +f 279/169 440/298 295/102 +f 49/177 65/109 220/20 +f 456/218 295/102 440/298 +f 236/401 220/20 65/109 +f 408/14 416/442 293/110 +f 184/461 63/117 192/416 +f 309/42 293/110 416/442 +f 79/51 192/416 63/117 +f 359/236 372/171 430/358 +f 130/240 210/308 143/175 +f 356/251 430/358 372/171 +f 127/255 143/175 210/308 +f 346/301 373/166 341/331 +f 117/306 112/336 144/170 +f 266/234 341/331 373/166 +f 36/242 144/170 112/336 +f 389/100 391/92 467/163 +f 162/95 247/335 164/87 +f 250/317 
467/163 391/92 +f 8/399 164/87 247/335 +f 353/266 347/296 281/159 +f 124/270 51/167 118/300 +f 348/291 281/159 347/296 +f 119/295 118/300 51/167 +f 296/98 443/283 283/150 +f 66/105 53/157 223/5 +f 444/278 283/150 443/283 +f 224/468 223/5 53/157 +f 20/327 95/438 355/256 +f 20/327 126/260 95/438 +f 371/176 355/256 95/438 +f 142/180 95/438 126/260 +f 296/98 286/138 443/283 +f 66/105 223/5 56/145 +f 442/288 443/283 286/138 +f 222/10 56/145 223/5 +f 420/418 198/380 249/323 +f 197/386 4/423 198/380 +f 196/392 249/323 198/380 +f 196/392 198/380 4/423 +f 360/231 264/244 256/284 +f 131/235 26/292 34/252 +f 250/317 256/284 264/244 +f 8/399 34/252 26/292 +f 276/184 275/189 441/293 +f 46/192 221/15 45/197 +f 458/208 441/293 275/189 +f 238/389 45/197 221/15 +f 301/78 384/120 302/74 +f 71/85 72/81 157/115 +f 369/186 302/74 384/120 +f 140/190 157/115 72/81 +f 418/430 352/271 466/168 +f 194/404 246/341 123/275 +f 413/457 466/168 352/271 +f 189/434 123/275 246/341 +f 467/163 264/244 468/158 +f 247/335 248/329 34/252 +f 360/231 468/158 264/244 +f 131/235 34/252 248/329 +f 390/96 252/305 369/186 +f 163/91 140/190 22/315 +f 302/74 369/186 252/305 +f 72/81 22/315 140/190 +f 375/156 387/108 381/132 +f 146/160 154/127 160/103 +f 386/112 381/132 387/108 +f 159/107 160/103 154/127 +f 380/136 395/76 379/140 +f 151/139 150/143 170/63 +f 396/72 379/140 395/76 +f 171/58 170/63 150/143 +f 352/271 420/418 413/457 +f 123/275 189/434 197/386 +f 400/54 413/457 420/418 +f 175/38 197/386 189/434 +f 427/376 323/439 437/316 +f 207/326 217/35 93/449 +f 411/467 437/316 323/439 +f 187/446 93/449 217/35 +f 388/104 374/161 389/100 +f 161/99 162/95 145/165 +f 391/92 389/100 374/161 +f 164/87 145/165 162/95 +f 394/80 327/415 165/83 +f 168/71 165/83 98/420 +f 3/429 165/83 327/415 +f 3/429 98/420 165/83 +f 355/256 371/176 462/188 +f 126/260 242/365 142/180 +f 463/183 462/188 371/176 +f 243/359 142/180 242/365 +f 1/441 268/224 165/83 +f 1/441 165/83 38/232 +f 394/80 165/83 268/224 +f 168/71 38/232 165/83 +f 12/375 13/369 303/70 +f 12/375 73/77 13/369 +f 269/219 303/70 13/369 +f 39/227 13/369 73/77 +f 387/108 375/156 388/104 +f 160/103 161/99 146/160 +f 374/161 388/104 375/156 +f 145/165 146/160 161/99 +f 13/369 14/363 269/219 +f 13/369 39/227 14/363 +f 313/22 269/219 14/363 +f 83/31 14/363 39/227 +f 294/106 301/78 299/86 +f 64/113 69/93 71/85 +f 302/74 299/86 301/78 +f 72/81 71/85 69/93 +f 341/331 266/234 262/254 +f 112/336 32/262 36/242 +f 447/263 262/254 266/234 +f 227/453 36/242 32/262 +f 381/132 386/112 382/128 +f 154/127 155/123 159/107 +f 385/116 382/128 386/112 +f 158/111 159/107 155/123 +f 281/159 331/391 426/382 +f 51/167 206/332 102/396 +f 267/229 426/382 331/391 +f 37/237 102/396 206/332 +f 424/394 392/88 427/376 +f 204/344 207/326 166/79 +f 323/439 427/376 392/88 +f 93/449 166/79 207/326 +f 430/358 356/251 421/412 +f 210/308 199/374 127/255 +f 438/310 421/412 356/251 +f 218/30 127/255 199/374 +f 392/88 328/409 394/80 +f 166/79 168/71 99/414 +f 327/415 394/80 328/409 +f 98/420 99/414 168/71 +f 458/208 439/304 441/293 +f 238/389 221/15 219/25 +f 345/307 441/293 439/304 +f 116/312 219/25 221/15 +f 383/124 363/216 342/325 +f 156/119 113/330 134/220 +f 464/178 342/325 363/216 +f 244/353 134/220 113/330 +f 458/208 462/188 460/198 +f 238/389 240/377 242/365 +f 459/203 460/198 462/188 +f 239/383 242/365 240/377 +f 435/328 431/352 365/206 +f 215/45 136/210 211/302 +f 395/76 365/206 431/352 +f 170/63 211/302 136/210 +f 415/447 464/178 399/59 +f 191/422 174/43 244/353 +f 363/216 399/59 464/178 +f 134/220 244/353 174/43 +f 263/249 
429/364 370/181 +f 33/257 141/185 209/314 +f 397/68 370/181 429/364 +f 172/53 209/314 141/185 +f 458/208 275/189 462/188 +f 238/389 242/365 45/197 +f 355/256 462/188 275/189 +f 126/260 45/197 242/365 +f 317/2 404/34 318/465 +f 87/11 88/6 180/13 +f 403/39 318/465 404/34 +f 179/18 180/13 88/6 +f 316/7 405/29 317/2 +f 86/16 87/11 181/8 +f 404/34 317/2 405/29 +f 180/13 181/8 87/11 +f 315/12 406/24 316/7 +f 85/21 86/16 182/3 +f 405/29 316/7 406/24 +f 181/8 182/3 86/16 +f 314/17 407/19 315/12 +f 84/26 85/21 183/466 +f 406/24 315/12 407/19 +f 182/3 183/466 85/21 +f 419/424 407/19 422/406 +f 195/398 202/356 183/466 +f 314/17 422/406 407/19 +f 84/26 183/466 202/356 +f 367/196 402/44 324/433 +f 138/200 94/444 178/23 +f 362/221 324/433 402/44 +f 133/225 178/23 94/444 +f 409/9 408/14 307/52 +f 185/456 77/61 184/461 +f 293/110 307/52 408/14 +f 63/117 184/461 77/61 +f 409/9 307/52 410/4 +f 185/456 186/451 77/61 +f 292/114 410/4 307/52 +f 62/121 77/61 186/451 +f 411/467 410/4 288/130 +f 187/446 58/137 186/451 +f 292/114 288/130 410/4 +f 62/121 186/451 58/137 +f 437/316 411/467 433/340 +f 217/35 213/55 187/446 +f 288/130 433/340 411/467 +f 58/137 187/446 213/55 +f 435/328 417/436 428/370 +f 215/45 208/320 193/410 +f 412/462 428/370 417/436 +f 188/440 193/410 208/320 +f 265/239 369/186 373/166 +f 35/247 144/170 140/190 +f 384/120 373/166 369/186 +f 157/115 140/190 144/170 +f 458/208 460/198 439/304 +f 238/389 219/25 240/377 +f 310/37 439/304 460/198 +f 80/46 240/377 219/25 +f 353/266 377/148 367/196 +f 124/270 138/200 148/151 +f 402/44 367/196 377/148 +f 178/23 148/151 138/200 +f 5/417 2/435 276/184 +f 5/417 46/192 2/435 +f 275/189 276/184 2/435 +f 45/197 2/435 46/192 +f 429/364 263/249 422/406 +f 209/314 202/356 33/257 +f 419/424 422/406 263/249 +f 195/398 33/257 202/356 +f 328/409 359/236 295/102 +f 99/414 65/109 130/240 +f 332/385 295/102 359/236 +f 103/390 130/240 65/109 +f 368/191 436/322 417/436 +f 139/195 193/410 216/40 +f 434/334 417/436 436/322 +f 214/50 216/40 193/410 +f 456/218 440/298 290/122 +f 236/401 60/129 220/20 +f 393/84 290/122 440/298 +f 167/75 220/20 60/129 +f 329/403 463/183 327/415 +f 100/408 98/420 243/359 +f 371/176 327/415 463/183 +f 142/180 243/359 98/420 +f 327/415 371/176 3/429 +f 98/420 3/429 142/180 +f 95/438 3/429 371/176 +f 95/438 142/180 3/429 +f 461/193 456/218 306/57 +f 241/371 76/65 236/401 +f 290/122 306/57 456/218 +f 60/129 236/401 76/65 +f 449/253 340/337 450/248 +f 229/443 230/437 111/342 +f 255/289 450/248 340/337 +f 25/297 111/342 230/437 +f 262/254 447/263 256/284 +f 32/262 26/292 227/453 +f 360/231 256/284 447/263 +f 131/235 227/453 26/292 +f 450/248 255/289 451/243 +f 230/437 231/431 25/297 +f 254/294 451/243 255/289 +f 24/303 25/297 231/431 +f 451/243 254/294 452/238 +f 231/431 232/425 24/303 +f 253/299 452/238 254/294 +f 23/309 24/303 232/425 +f 452/238 253/299 453/233 +f 232/425 233/419 23/309 +f 257/279 453/233 253/299 +f 27/287 23/309 233/419 +f 257/279 342/325 453/233 +f 27/287 233/419 113/330 +f 454/228 453/233 342/325 +f 234/413 113/330 233/419 +f 414/452 465/173 415/447 +f 190/428 191/422 245/347 +f 464/178 415/447 465/173 +f 244/353 245/347 191/422 +f 442/288 414/452 287/134 +f 222/10 57/141 190/428 +f 415/447 287/134 414/452 +f 191/422 190/428 57/141 +f 442/288 287/134 443/283 +f 222/10 223/5 57/141 +f 259/269 443/283 287/134 +f 29/277 57/141 223/5 +f 443/283 259/269 444/278 +f 223/5 224/468 29/277 +f 258/274 444/278 259/269 +f 28/282 29/277 224/468 +f 445/273 444/278 260/264 +f 225/463 30/272 224/468 +f 258/274 260/264 444/278 +f 28/282 224/468 
30/272 +f 260/264 261/259 445/273 +f 30/272 225/463 31/267 +f 446/268 445/273 261/259 +f 226/458 31/267 225/463 +f 261/259 468/158 446/268 +f 31/267 226/458 248/329 +f 343/319 446/268 468/158 +f 114/324 248/329 226/458 +f 251/311 310/37 459/203 +f 21/321 239/383 80/46 +f 460/198 459/203 310/37 +f 240/377 80/46 239/383 +f 291/118 306/57 393/84 +f 61/125 167/75 76/65 +f 290/122 393/84 306/57 +f 60/129 76/65 167/75 +f 461/193 306/57 329/403 +f 241/371 100/408 76/65 +f 291/118 329/403 306/57 +f 61/125 76/65 100/408 +f 377/148 434/334 402/44 +f 148/151 178/23 214/50 +f 436/322 402/44 434/334 +f 216/40 214/50 178/23 +f 251/311 291/118 310/37 +f 21/321 80/46 61/125 +f 393/84 310/37 291/118 +f 167/75 61/125 80/46 +f 412/462 417/436 377/148 +f 188/440 148/151 193/410 +f 434/334 377/148 417/436 +f 214/50 193/410 148/151 +f 342/325 464/178 454/228 +f 113/330 234/413 244/353 +f 465/173 454/228 464/178 +f 245/347 244/353 234/413 +f 454/228 465/173 358/241 +f 234/413 129/245 245/347 +f 466/168 358/241 465/173 +f 246/341 245/347 129/245 +f 413/457 344/313 466/168 +f 189/434 246/341 115/318 +f 358/241 466/168 344/313 +f 129/245 115/318 246/341 +f 438/310 344/313 400/54 +f 218/30 175/38 115/318 +f 413/457 400/54 344/313 +f 189/434 115/318 175/38 +f 364/211 441/293 361/226 +f 135/215 132/230 221/15 +f 345/307 361/226 441/293 +f 116/312 221/15 132/230 +f 457/213 421/412 400/54 +f 237/395 175/38 199/374 +f 438/310 400/54 421/412 +f 218/30 199/374 175/38 +f 457/213 364/211 421/412 +f 237/395 199/374 135/215 +f 361/226 421/412 364/211 +f 132/230 135/215 199/374 +f 362/221 402/44 289/126 +f 133/225 59/133 178/23 +f 436/322 289/126 402/44 +f 216/40 178/23 59/133 +f 354/261 266/234 384/120 +f 125/265 157/115 36/242 +f 373/166 384/120 266/234 +f 144/170 36/242 157/115 +f 256/284 250/317 340/337 +f 26/292 111/342 8/399 +f 391/92 340/337 250/317 +f 164/87 8/399 111/342 +f 262/254 256/284 449/253 +f 32/262 229/443 26/292 +f 340/337 449/253 256/284 +f 111/342 26/292 229/443 +f 15/357 318/465 14/363 +f 15/357 14/363 88/6 +f 313/22 14/363 318/465 +f 83/31 88/6 14/363 +f 318/465 403/39 313/22 +f 88/6 83/31 179/18 +f 312/27 313/22 403/39 +f 82/36 179/18 83/31 +f 403/39 319/460 312/27 +f 179/18 82/36 89/1 +f 311/32 312/27 319/460 +f 81/41 89/1 82/36 +f 319/460 325/427 311/32 +f 89/1 81/41 96/432 +f 416/442 311/32 325/427 +f 192/416 96/432 81/41 diff --git a/mediapipe/modules/face_geometry/data/canonical_face_model_uv_visualization.png b/mediapipe/modules/face_geometry/data/canonical_face_model_uv_visualization.png new file mode 100644 index 0000000..2acd991 Binary files /dev/null and b/mediapipe/modules/face_geometry/data/canonical_face_model_uv_visualization.png differ diff --git a/mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_detection.pbtxt b/mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_detection.pbtxt new file mode 100644 index 0000000..c4389a6 --- /dev/null +++ b/mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_detection.pbtxt @@ -0,0 +1,78 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +input_source: FACE_DETECTION_PIPELINE +procrustes_landmark_basis { landmark_id: 0 weight: 1.0 } +procrustes_landmark_basis { landmark_id: 1 weight: 1.0 } +procrustes_landmark_basis { landmark_id: 2 weight: 1.0 } +procrustes_landmark_basis { landmark_id: 3 weight: 1.0 } +procrustes_landmark_basis { landmark_id: 4 weight: 1.0 } +procrustes_landmark_basis { landmark_id: 5 weight: 1.0 } +# NOTE: the triangular topology of the face meshes is only useful when derived +# from the 468 face landmarks, not from the 6 face detection landmarks +# (keypoints). The latter don't cover the entire face and this mesh is +# defined here only to comply with the API. It should be considered as +# a placeholder and/or for debugging purposes. +# +# Use the face geometry derived from the face detection landmarks +# (keypoints) for the face pose transformation matrix, not the mesh. +canonical_mesh: { + vertex_type: VERTEX_PT + primitive_type: TRIANGLE + vertex_buffer: -3.1511454582214355 + vertex_buffer: 2.6246179342269897 + vertex_buffer: 3.4656630754470825 + vertex_buffer: 0.349575996398926 + vertex_buffer: 0.38137748837470997 + vertex_buffer: 3.1511454582214355 + vertex_buffer: 2.6246179342269897 + vertex_buffer: 3.4656630754470825 + vertex_buffer: 0.650443494319916 + vertex_buffer: 0.38137999176979054 + vertex_buffer: 0.0 + vertex_buffer: -1.126865029335022 + vertex_buffer: 7.475604057312012 + vertex_buffer: 0.500025987625122 + vertex_buffer: 0.547487020492554 + vertex_buffer: 0.0 + vertex_buffer: -4.304508209228516 + vertex_buffer: 4.162498950958252 + vertex_buffer: 0.499989986419678 + vertex_buffer: 0.694203019142151 + vertex_buffer: -7.664182186126709 + vertex_buffer: 0.673132002353668 + vertex_buffer: -2.435867071151733 + vertex_buffer: 0.007561000064015 + vertex_buffer: 0.480777025222778 + vertex_buffer: 7.664182186126709 + vertex_buffer: 0.673132002353668 + vertex_buffer: -2.435867071151733 + vertex_buffer: 0.992439985275269 + vertex_buffer: 0.480777025222778 + index_buffer: 0 + index_buffer: 1 + index_buffer: 2 + index_buffer: 1 + index_buffer: 5 + index_buffer: 2 + index_buffer: 4 + index_buffer: 0 + index_buffer: 2 + index_buffer: 4 + index_buffer: 2 + index_buffer: 3 + index_buffer: 2 + index_buffer: 5 + index_buffer: 3 +} diff --git a/mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_landmarks.pbtxt b/mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_landmarks.pbtxt new file mode 100644 index 0000000..8dfb463 --- /dev/null +++ b/mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_landmarks.pbtxt @@ -0,0 +1,5086 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
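Both geometry pipeline metadata files (the detection one above and the landmark one below) share the same layout: a set of procrustes_landmark_basis entries giving per-landmark weights for the geometry pipeline's weighted Procrustes fit of runtime landmarks against the canonical model, followed by a canonical_mesh whose vertex_buffer interleaves five floats per vertex under VERTEX_PT (position x, y, z, then texture u, v) and whose index_buffer lists vertex indices three at a time, one TRIANGLE per triple. Below is a minimal Rust sketch of deinterleaving such a buffer; the Vertex struct and unpack_mesh helper are hypothetical names for illustration, not part of this crate or of MediaPipe.

// Hypothetical helper: splits a flat VERTEX_PT buffer (x, y, z, u, v per
// vertex) and a flat TRIANGLE index buffer into structured form.
#[derive(Debug, Clone, Copy)]
struct Vertex {
    position: [f32; 3], // x, y, z in canonical face model space
    uv: [f32; 2],       // u, v into canonical_face_model_uv_visualization.png
}

fn unpack_mesh(
    vertex_buffer: &[f32],
    index_buffer: &[u32],
) -> Option<(Vec<Vertex>, Vec<[u32; 3]>)> {
    // A well-formed VERTEX_PT mesh is a whole number of 5-float records
    // and a whole number of index triples.
    if vertex_buffer.len() % 5 != 0 || index_buffer.len() % 3 != 0 {
        return None;
    }
    let vertices: Vec<Vertex> = vertex_buffer
        .chunks_exact(5)
        .map(|v| Vertex {
            position: [v[0], v[1], v[2]],
            uv: [v[3], v[4]],
        })
        .collect();
    let triangles: Vec<[u32; 3]> = index_buffer
        .chunks_exact(3)
        .map(|t| [t[0], t[1], t[2]])
        .collect();
    Some((vertices, triangles))
}

fn main() {
    // The first two vertices of the placeholder detection mesh above,
    // truncated to f32 precision.
    let vertex_buffer = [
        -3.151_145_5_f32, 2.624_618, 3.465_663, 0.349_576, 0.381_377,
        3.151_145_5, 2.624_618, 3.465_663, 0.650_443_5, 0.381_380,
    ];
    let (vertices, triangles) = unpack_mesh(&vertex_buffer, &[]).unwrap();
    assert!(triangles.is_empty());
    println!("{:?}", vertices[0].uv); // [0.349576, 0.381377]
}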
+ +input_source: FACE_LANDMARK_PIPELINE +procrustes_landmark_basis { landmark_id: 4 weight: 0.070909939706326 } +procrustes_landmark_basis { landmark_id: 6 weight: 0.032100144773722 } +procrustes_landmark_basis { landmark_id: 10 weight: 0.008446550928056 } +procrustes_landmark_basis { landmark_id: 33 weight: 0.058724168688059 } +procrustes_landmark_basis { landmark_id: 54 weight: 0.007667080033571 } +procrustes_landmark_basis { landmark_id: 67 weight: 0.009078059345484 } +procrustes_landmark_basis { landmark_id: 117 weight: 0.009791937656701 } +procrustes_landmark_basis { landmark_id: 119 weight: 0.014565368182957 } +procrustes_landmark_basis { landmark_id: 121 weight: 0.018591361120343 } +procrustes_landmark_basis { landmark_id: 127 weight: 0.005197994410992 } +procrustes_landmark_basis { landmark_id: 129 weight: 0.120625205338001 } +procrustes_landmark_basis { landmark_id: 132 weight: 0.005560018587857 } +procrustes_landmark_basis { landmark_id: 133 weight: 0.05328618362546 } +procrustes_landmark_basis { landmark_id: 136 weight: 0.066890455782413 } +procrustes_landmark_basis { landmark_id: 143 weight: 0.014816547743976 } +procrustes_landmark_basis { landmark_id: 147 weight: 0.014262833632529 } +procrustes_landmark_basis { landmark_id: 198 weight: 0.025462191551924 } +procrustes_landmark_basis { landmark_id: 205 weight: 0.047252278774977 } +procrustes_landmark_basis { landmark_id: 263 weight: 0.058724168688059 } +procrustes_landmark_basis { landmark_id: 284 weight: 0.007667080033571 } +procrustes_landmark_basis { landmark_id: 297 weight: 0.009078059345484 } +procrustes_landmark_basis { landmark_id: 346 weight: 0.009791937656701 } +procrustes_landmark_basis { landmark_id: 348 weight: 0.014565368182957 } +procrustes_landmark_basis { landmark_id: 350 weight: 0.018591361120343 } +procrustes_landmark_basis { landmark_id: 356 weight: 0.005197994410992 } +procrustes_landmark_basis { landmark_id: 358 weight: 0.120625205338001 } +procrustes_landmark_basis { landmark_id: 361 weight: 0.005560018587857 } +procrustes_landmark_basis { landmark_id: 362 weight: 0.05328618362546 } +procrustes_landmark_basis { landmark_id: 365 weight: 0.066890455782413 } +procrustes_landmark_basis { landmark_id: 372 weight: 0.014816547743976 } +procrustes_landmark_basis { landmark_id: 376 weight: 0.014262833632529 } +procrustes_landmark_basis { landmark_id: 420 weight: 0.025462191551924 } +procrustes_landmark_basis { landmark_id: 425 weight: 0.047252278774977 } +canonical_mesh: { + vertex_type: VERTEX_PT + primitive_type: TRIANGLE + vertex_buffer: 0.000000 + vertex_buffer: -3.406404 + vertex_buffer: 5.979507 + vertex_buffer: 0.499977 + vertex_buffer: 0.652534 + vertex_buffer: 0.000000 + vertex_buffer: -1.126865 + vertex_buffer: 7.475604 + vertex_buffer: 0.500026 + vertex_buffer: 0.547487 + vertex_buffer: 0.000000 + vertex_buffer: -2.089024 + vertex_buffer: 6.058267 + vertex_buffer: 0.499974 + vertex_buffer: 0.602372 + vertex_buffer: -0.463928 + vertex_buffer: 0.955357 + vertex_buffer: 6.633583 + vertex_buffer: 0.482113 + vertex_buffer: 0.471979 + vertex_buffer: 0.000000 + vertex_buffer: -0.463170 + vertex_buffer: 7.586580 + vertex_buffer: 0.500151 + vertex_buffer: 0.527156 + vertex_buffer: 0.000000 + vertex_buffer: 0.365669 + vertex_buffer: 7.242870 + vertex_buffer: 0.499910 + vertex_buffer: 0.498253 + vertex_buffer: 0.000000 + vertex_buffer: 2.473255 + vertex_buffer: 5.788627 + vertex_buffer: 0.499523 + vertex_buffer: 0.401062 + vertex_buffer: -4.253081 + vertex_buffer: 2.577646 + vertex_buffer: 3.279702 + vertex_buffer: 
0.289712 + vertex_buffer: 0.380764 + vertex_buffer: 0.000000 + vertex_buffer: 4.019042 + vertex_buffer: 5.284764 + vertex_buffer: 0.499955 + vertex_buffer: 0.312398 + vertex_buffer: 0.000000 + vertex_buffer: 4.885979 + vertex_buffer: 5.385258 + vertex_buffer: 0.499987 + vertex_buffer: 0.269919 + vertex_buffer: 0.000000 + vertex_buffer: 8.261778 + vertex_buffer: 4.481535 + vertex_buffer: 0.500023 + vertex_buffer: 0.107050 + vertex_buffer: 0.000000 + vertex_buffer: -3.706811 + vertex_buffer: 5.864924 + vertex_buffer: 0.500023 + vertex_buffer: 0.666234 + vertex_buffer: 0.000000 + vertex_buffer: -3.918301 + vertex_buffer: 5.569430 + vertex_buffer: 0.500016 + vertex_buffer: 0.679224 + vertex_buffer: 0.000000 + vertex_buffer: -3.994436 + vertex_buffer: 5.219482 + vertex_buffer: 0.500023 + vertex_buffer: 0.692348 + vertex_buffer: 0.000000 + vertex_buffer: -4.542400 + vertex_buffer: 5.404754 + vertex_buffer: 0.499977 + vertex_buffer: 0.695278 + vertex_buffer: 0.000000 + vertex_buffer: -4.745577 + vertex_buffer: 5.529457 + vertex_buffer: 0.499977 + vertex_buffer: 0.705934 + vertex_buffer: 0.000000 + vertex_buffer: -5.019567 + vertex_buffer: 5.601448 + vertex_buffer: 0.499977 + vertex_buffer: 0.719385 + vertex_buffer: 0.000000 + vertex_buffer: -5.365123 + vertex_buffer: 5.535441 + vertex_buffer: 0.499977 + vertex_buffer: 0.737019 + vertex_buffer: 0.000000 + vertex_buffer: -6.149624 + vertex_buffer: 5.071372 + vertex_buffer: 0.499968 + vertex_buffer: 0.781371 + vertex_buffer: 0.000000 + vertex_buffer: -1.501095 + vertex_buffer: 7.112196 + vertex_buffer: 0.499816 + vertex_buffer: 0.562981 + vertex_buffer: -0.416106 + vertex_buffer: -1.466449 + vertex_buffer: 6.447657 + vertex_buffer: 0.473773 + vertex_buffer: 0.573910 + vertex_buffer: -7.087960 + vertex_buffer: 5.434801 + vertex_buffer: 0.099620 + vertex_buffer: 0.104907 + vertex_buffer: 0.254141 + vertex_buffer: -2.628639 + vertex_buffer: 2.035898 + vertex_buffer: 3.848121 + vertex_buffer: 0.365930 + vertex_buffer: 0.409576 + vertex_buffer: -3.198363 + vertex_buffer: 1.985815 + vertex_buffer: 3.796952 + vertex_buffer: 0.338758 + vertex_buffer: 0.413025 + vertex_buffer: -3.775151 + vertex_buffer: 2.039402 + vertex_buffer: 3.646194 + vertex_buffer: 0.311120 + vertex_buffer: 0.409460 + vertex_buffer: -4.465819 + vertex_buffer: 2.422950 + vertex_buffer: 3.155168 + vertex_buffer: 0.274658 + vertex_buffer: 0.389131 + vertex_buffer: -2.164289 + vertex_buffer: 2.189867 + vertex_buffer: 3.851822 + vertex_buffer: 0.393362 + vertex_buffer: 0.403706 + vertex_buffer: -3.208229 + vertex_buffer: 3.223926 + vertex_buffer: 4.115822 + vertex_buffer: 0.345234 + vertex_buffer: 0.344011 + vertex_buffer: -2.673803 + vertex_buffer: 3.205337 + vertex_buffer: 4.092203 + vertex_buffer: 0.370094 + vertex_buffer: 0.346076 + vertex_buffer: -3.745193 + vertex_buffer: 3.165286 + vertex_buffer: 3.972409 + vertex_buffer: 0.319322 + vertex_buffer: 0.347265 + vertex_buffer: -4.161018 + vertex_buffer: 3.059069 + vertex_buffer: 3.719554 + vertex_buffer: 0.297903 + vertex_buffer: 0.353591 + vertex_buffer: -5.062006 + vertex_buffer: 1.934418 + vertex_buffer: 2.776093 + vertex_buffer: 0.247792 + vertex_buffer: 0.410810 + vertex_buffer: -2.266659 + vertex_buffer: -7.425768 + vertex_buffer: 4.389812 + vertex_buffer: 0.396889 + vertex_buffer: 0.842755 + vertex_buffer: -4.445859 + vertex_buffer: 2.663991 + vertex_buffer: 3.173422 + vertex_buffer: 0.280098 + vertex_buffer: 0.375600 + vertex_buffer: -7.214530 + vertex_buffer: 2.263009 + vertex_buffer: 0.073150 + vertex_buffer: 0.106310 + 
vertex_buffer: 0.399956 + vertex_buffer: -5.799793 + vertex_buffer: 2.349546 + vertex_buffer: 2.204059 + vertex_buffer: 0.209925 + vertex_buffer: 0.391353 + vertex_buffer: -2.844939 + vertex_buffer: -0.720868 + vertex_buffer: 4.433130 + vertex_buffer: 0.355808 + vertex_buffer: 0.534406 + vertex_buffer: -0.711452 + vertex_buffer: -3.329355 + vertex_buffer: 5.877044 + vertex_buffer: 0.471751 + vertex_buffer: 0.650404 + vertex_buffer: -0.606033 + vertex_buffer: -3.924562 + vertex_buffer: 5.444923 + vertex_buffer: 0.474155 + vertex_buffer: 0.680192 + vertex_buffer: -1.431615 + vertex_buffer: -3.500953 + vertex_buffer: 5.496189 + vertex_buffer: 0.439785 + vertex_buffer: 0.657229 + vertex_buffer: -1.914910 + vertex_buffer: -3.803146 + vertex_buffer: 5.028930 + vertex_buffer: 0.414617 + vertex_buffer: 0.666541 + vertex_buffer: -1.131043 + vertex_buffer: -3.973937 + vertex_buffer: 5.189648 + vertex_buffer: 0.450374 + vertex_buffer: 0.680861 + vertex_buffer: -1.563548 + vertex_buffer: -4.082763 + vertex_buffer: 4.842263 + vertex_buffer: 0.428771 + vertex_buffer: 0.682691 + vertex_buffer: -2.650112 + vertex_buffer: -5.003649 + vertex_buffer: 4.188483 + vertex_buffer: 0.374971 + vertex_buffer: 0.727805 + vertex_buffer: -0.427049 + vertex_buffer: -1.094134 + vertex_buffer: 7.360529 + vertex_buffer: 0.486717 + vertex_buffer: 0.547629 + vertex_buffer: -0.496396 + vertex_buffer: -0.475659 + vertex_buffer: 7.440358 + vertex_buffer: 0.485301 + vertex_buffer: 0.527395 + vertex_buffer: -5.253307 + vertex_buffer: 3.881582 + vertex_buffer: 3.363159 + vertex_buffer: 0.257765 + vertex_buffer: 0.314490 + vertex_buffer: -1.718698 + vertex_buffer: 0.974609 + vertex_buffer: 4.558359 + vertex_buffer: 0.401223 + vertex_buffer: 0.455172 + vertex_buffer: -1.608635 + vertex_buffer: -0.942516 + vertex_buffer: 5.814193 + vertex_buffer: 0.429819 + vertex_buffer: 0.548615 + vertex_buffer: -1.651267 + vertex_buffer: -0.610868 + vertex_buffer: 5.581319 + vertex_buffer: 0.421352 + vertex_buffer: 0.533741 + vertex_buffer: -4.765501 + vertex_buffer: -0.701554 + vertex_buffer: 3.534632 + vertex_buffer: 0.276896 + vertex_buffer: 0.532057 + vertex_buffer: -0.478306 + vertex_buffer: 0.295766 + vertex_buffer: 7.101013 + vertex_buffer: 0.483370 + vertex_buffer: 0.499587 + vertex_buffer: -3.734964 + vertex_buffer: 4.508230 + vertex_buffer: 4.550454 + vertex_buffer: 0.337212 + vertex_buffer: 0.282883 + vertex_buffer: -4.588603 + vertex_buffer: 4.302037 + vertex_buffer: 4.048484 + vertex_buffer: 0.296392 + vertex_buffer: 0.293243 + vertex_buffer: -6.279331 + vertex_buffer: 6.615427 + vertex_buffer: 1.425850 + vertex_buffer: 0.169295 + vertex_buffer: 0.193814 + vertex_buffer: -1.220941 + vertex_buffer: 4.142165 + vertex_buffer: 5.106035 + vertex_buffer: 0.447580 + vertex_buffer: 0.302610 + vertex_buffer: -2.193489 + vertex_buffer: 3.100317 + vertex_buffer: 4.000575 + vertex_buffer: 0.392390 + vertex_buffer: 0.353888 + vertex_buffer: -3.102642 + vertex_buffer: -4.352984 + vertex_buffer: 4.095905 + vertex_buffer: 0.354490 + vertex_buffer: 0.696784 + vertex_buffer: -6.719682 + vertex_buffer: -4.788645 + vertex_buffer: -1.745401 + vertex_buffer: 0.067305 + vertex_buffer: 0.730105 + vertex_buffer: -1.193824 + vertex_buffer: -1.306795 + vertex_buffer: 5.737747 + vertex_buffer: 0.442739 + vertex_buffer: 0.572826 + vertex_buffer: -0.729766 + vertex_buffer: -1.593712 + vertex_buffer: 5.833208 + vertex_buffer: 0.457098 + vertex_buffer: 0.584792 + vertex_buffer: -2.456206 + vertex_buffer: -4.342621 + vertex_buffer: 4.283884 + vertex_buffer: 0.381974 
+ vertex_buffer: 0.694711 + vertex_buffer: -2.204823 + vertex_buffer: -4.304508 + vertex_buffer: 4.162499 + vertex_buffer: 0.392389 + vertex_buffer: 0.694203 + vertex_buffer: -4.985894 + vertex_buffer: 4.802461 + vertex_buffer: 3.751977 + vertex_buffer: 0.277076 + vertex_buffer: 0.271932 + vertex_buffer: -1.592294 + vertex_buffer: -1.257709 + vertex_buffer: 5.456949 + vertex_buffer: 0.422552 + vertex_buffer: 0.563233 + vertex_buffer: -2.644548 + vertex_buffer: 4.524654 + vertex_buffer: 4.921559 + vertex_buffer: 0.385919 + vertex_buffer: 0.281364 + vertex_buffer: -2.760292 + vertex_buffer: 5.100971 + vertex_buffer: 5.015990 + vertex_buffer: 0.383103 + vertex_buffer: 0.255840 + vertex_buffer: -3.523964 + vertex_buffer: 8.005976 + vertex_buffer: 3.729163 + vertex_buffer: 0.331431 + vertex_buffer: 0.119714 + vertex_buffer: -5.599763 + vertex_buffer: 5.715470 + vertex_buffer: 2.724259 + vertex_buffer: 0.229924 + vertex_buffer: 0.232003 + vertex_buffer: -3.063932 + vertex_buffer: 6.566144 + vertex_buffer: 4.529981 + vertex_buffer: 0.364501 + vertex_buffer: 0.189114 + vertex_buffer: -5.720968 + vertex_buffer: 4.254584 + vertex_buffer: 2.830852 + vertex_buffer: 0.229622 + vertex_buffer: 0.299541 + vertex_buffer: -6.374393 + vertex_buffer: 4.785590 + vertex_buffer: 1.591691 + vertex_buffer: 0.173287 + vertex_buffer: 0.278748 + vertex_buffer: -0.672728 + vertex_buffer: -3.688016 + vertex_buffer: 5.737804 + vertex_buffer: 0.472879 + vertex_buffer: 0.666198 + vertex_buffer: -1.262560 + vertex_buffer: -3.787691 + vertex_buffer: 5.417779 + vertex_buffer: 0.446828 + vertex_buffer: 0.668527 + vertex_buffer: -1.732553 + vertex_buffer: -3.952767 + vertex_buffer: 5.000579 + vertex_buffer: 0.422762 + vertex_buffer: 0.673890 + vertex_buffer: -1.043625 + vertex_buffer: -1.464973 + vertex_buffer: 5.662455 + vertex_buffer: 0.445308 + vertex_buffer: 0.580066 + vertex_buffer: -2.321234 + vertex_buffer: -4.329069 + vertex_buffer: 4.258156 + vertex_buffer: 0.388103 + vertex_buffer: 0.693961 + vertex_buffer: -2.056846 + vertex_buffer: -4.477671 + vertex_buffer: 4.520883 + vertex_buffer: 0.403039 + vertex_buffer: 0.706540 + vertex_buffer: -2.153084 + vertex_buffer: -4.276322 + vertex_buffer: 4.038093 + vertex_buffer: 0.403629 + vertex_buffer: 0.693953 + vertex_buffer: -0.946874 + vertex_buffer: -1.035249 + vertex_buffer: 6.512274 + vertex_buffer: 0.460042 + vertex_buffer: 0.557139 + vertex_buffer: -1.469132 + vertex_buffer: -4.036351 + vertex_buffer: 4.604908 + vertex_buffer: 0.431158 + vertex_buffer: 0.692366 + vertex_buffer: -1.024340 + vertex_buffer: -3.989851 + vertex_buffer: 4.926693 + vertex_buffer: 0.452182 + vertex_buffer: 0.692366 + vertex_buffer: -0.533422 + vertex_buffer: -3.993222 + vertex_buffer: 5.138202 + vertex_buffer: 0.475387 + vertex_buffer: 0.692366 + vertex_buffer: -0.769720 + vertex_buffer: -6.095394 + vertex_buffer: 4.985883 + vertex_buffer: 0.465828 + vertex_buffer: 0.779190 + vertex_buffer: -0.699606 + vertex_buffer: -5.291850 + vertex_buffer: 5.448304 + vertex_buffer: 0.472329 + vertex_buffer: 0.736226 + vertex_buffer: -0.669687 + vertex_buffer: -4.949770 + vertex_buffer: 5.509612 + vertex_buffer: 0.473087 + vertex_buffer: 0.717857 + vertex_buffer: -0.630947 + vertex_buffer: -4.695101 + vertex_buffer: 5.449371 + vertex_buffer: 0.473122 + vertex_buffer: 0.704626 + vertex_buffer: -0.583218 + vertex_buffer: -4.517982 + vertex_buffer: 5.339869 + vertex_buffer: 0.473033 + vertex_buffer: 0.695278 + vertex_buffer: -1.537170 + vertex_buffer: -4.423206 + vertex_buffer: 4.745470 + vertex_buffer: 
0.427942 + vertex_buffer: 0.695278 + vertex_buffer: -1.615600 + vertex_buffer: -4.475942 + vertex_buffer: 4.813632 + vertex_buffer: 0.426479 + vertex_buffer: 0.703540 + vertex_buffer: -1.729053 + vertex_buffer: -4.618680 + vertex_buffer: 4.854463 + vertex_buffer: 0.423162 + vertex_buffer: 0.711846 + vertex_buffer: -1.838624 + vertex_buffer: -4.828746 + vertex_buffer: 4.823737 + vertex_buffer: 0.418309 + vertex_buffer: 0.720063 + vertex_buffer: -2.368250 + vertex_buffer: -3.106237 + vertex_buffer: 4.868096 + vertex_buffer: 0.390095 + vertex_buffer: 0.639573 + vertex_buffer: -7.542244 + vertex_buffer: -1.049282 + vertex_buffer: -2.431321 + vertex_buffer: 0.013954 + vertex_buffer: 0.560034 + vertex_buffer: 0.000000 + vertex_buffer: -1.724003 + vertex_buffer: 6.601390 + vertex_buffer: 0.499914 + vertex_buffer: 0.580147 + vertex_buffer: -1.826614 + vertex_buffer: -4.399531 + vertex_buffer: 4.399021 + vertex_buffer: 0.413200 + vertex_buffer: 0.695400 + vertex_buffer: -1.929558 + vertex_buffer: -4.411831 + vertex_buffer: 4.497052 + vertex_buffer: 0.409626 + vertex_buffer: 0.701823 + vertex_buffer: -0.597442 + vertex_buffer: -2.013686 + vertex_buffer: 5.866456 + vertex_buffer: 0.468080 + vertex_buffer: 0.601535 + vertex_buffer: -1.405627 + vertex_buffer: -1.714196 + vertex_buffer: 5.241087 + vertex_buffer: 0.422729 + vertex_buffer: 0.585985 + vertex_buffer: -0.662449 + vertex_buffer: -1.819321 + vertex_buffer: 5.863759 + vertex_buffer: 0.463080 + vertex_buffer: 0.593784 + vertex_buffer: -2.342340 + vertex_buffer: 0.572222 + vertex_buffer: 4.294303 + vertex_buffer: 0.372120 + vertex_buffer: 0.473414 + vertex_buffer: -3.327324 + vertex_buffer: 0.104863 + vertex_buffer: 4.113860 + vertex_buffer: 0.334562 + vertex_buffer: 0.496073 + vertex_buffer: -1.726175 + vertex_buffer: -0.919165 + vertex_buffer: 5.273355 + vertex_buffer: 0.411671 + vertex_buffer: 0.546965 + vertex_buffer: -5.133204 + vertex_buffer: 7.485602 + vertex_buffer: 2.660442 + vertex_buffer: 0.242176 + vertex_buffer: 0.147676 + vertex_buffer: -4.538641 + vertex_buffer: 6.319907 + vertex_buffer: 3.683424 + vertex_buffer: 0.290777 + vertex_buffer: 0.201446 + vertex_buffer: -3.986562 + vertex_buffer: 5.109487 + vertex_buffer: 4.466315 + vertex_buffer: 0.327338 + vertex_buffer: 0.256527 + vertex_buffer: -2.169681 + vertex_buffer: -5.440433 + vertex_buffer: 4.455874 + vertex_buffer: 0.399510 + vertex_buffer: 0.748921 + vertex_buffer: -1.395634 + vertex_buffer: 5.011963 + vertex_buffer: 5.316032 + vertex_buffer: 0.441728 + vertex_buffer: 0.261676 + vertex_buffer: -1.619500 + vertex_buffer: 6.599217 + vertex_buffer: 4.921106 + vertex_buffer: 0.429765 + vertex_buffer: 0.187834 + vertex_buffer: -1.891399 + vertex_buffer: 8.236377 + vertex_buffer: 4.274997 + vertex_buffer: 0.412198 + vertex_buffer: 0.108901 + vertex_buffer: -4.195832 + vertex_buffer: 2.235205 + vertex_buffer: 3.375099 + vertex_buffer: 0.288955 + vertex_buffer: 0.398952 + vertex_buffer: -5.733342 + vertex_buffer: 1.411738 + vertex_buffer: 2.431726 + vertex_buffer: 0.218937 + vertex_buffer: 0.435411 + vertex_buffer: -1.859887 + vertex_buffer: 2.355757 + vertex_buffer: 3.843181 + vertex_buffer: 0.412782 + vertex_buffer: 0.398970 + vertex_buffer: -4.988612 + vertex_buffer: 3.074654 + vertex_buffer: 3.083858 + vertex_buffer: 0.257135 + vertex_buffer: 0.355440 + vertex_buffer: -1.303263 + vertex_buffer: 1.416453 + vertex_buffer: 4.831091 + vertex_buffer: 0.427685 + vertex_buffer: 0.437961 + vertex_buffer: -1.305757 + vertex_buffer: -0.672779 + vertex_buffer: 6.415959 + vertex_buffer: 
0.448340 + vertex_buffer: 0.536936 + vertex_buffer: -6.465170 + vertex_buffer: 0.937119 + vertex_buffer: 1.689873 + vertex_buffer: 0.178560 + vertex_buffer: 0.457554 + vertex_buffer: -5.258659 + vertex_buffer: 0.945811 + vertex_buffer: 2.974312 + vertex_buffer: 0.247308 + vertex_buffer: 0.457194 + vertex_buffer: -4.432338 + vertex_buffer: 0.722096 + vertex_buffer: 3.522615 + vertex_buffer: 0.286267 + vertex_buffer: 0.467675 + vertex_buffer: -3.300681 + vertex_buffer: 0.861641 + vertex_buffer: 3.872784 + vertex_buffer: 0.332828 + vertex_buffer: 0.460712 + vertex_buffer: -2.430178 + vertex_buffer: 1.131492 + vertex_buffer: 4.039035 + vertex_buffer: 0.368756 + vertex_buffer: 0.447207 + vertex_buffer: -1.820731 + vertex_buffer: 1.467954 + vertex_buffer: 4.224124 + vertex_buffer: 0.398964 + vertex_buffer: 0.432655 + vertex_buffer: -0.563221 + vertex_buffer: 2.307693 + vertex_buffer: 5.566789 + vertex_buffer: 0.476410 + vertex_buffer: 0.405806 + vertex_buffer: -6.338145 + vertex_buffer: -0.529279 + vertex_buffer: 1.881175 + vertex_buffer: 0.189241 + vertex_buffer: 0.523924 + vertex_buffer: -5.587698 + vertex_buffer: 3.208071 + vertex_buffer: 2.687839 + vertex_buffer: 0.228962 + vertex_buffer: 0.348951 + vertex_buffer: -0.242624 + vertex_buffer: -1.462857 + vertex_buffer: 7.071491 + vertex_buffer: 0.490726 + vertex_buffer: 0.562401 + vertex_buffer: -1.611251 + vertex_buffer: 0.339326 + vertex_buffer: 4.895421 + vertex_buffer: 0.404670 + vertex_buffer: 0.485133 + vertex_buffer: -7.743095 + vertex_buffer: 2.364999 + vertex_buffer: -2.005167 + vertex_buffer: 0.019469 + vertex_buffer: 0.401564 + vertex_buffer: -1.391142 + vertex_buffer: 1.851048 + vertex_buffer: 4.448999 + vertex_buffer: 0.426243 + vertex_buffer: 0.420431 + vertex_buffer: -1.785794 + vertex_buffer: -0.978284 + vertex_buffer: 4.850470 + vertex_buffer: 0.396993 + vertex_buffer: 0.548797 + vertex_buffer: -4.670959 + vertex_buffer: 2.664461 + vertex_buffer: 3.084075 + vertex_buffer: 0.266470 + vertex_buffer: 0.376977 + vertex_buffer: -1.333970 + vertex_buffer: -0.283761 + vertex_buffer: 6.097047 + vertex_buffer: 0.439121 + vertex_buffer: 0.518958 + vertex_buffer: -7.270895 + vertex_buffer: -2.890917 + vertex_buffer: -2.252455 + vertex_buffer: 0.032314 + vertex_buffer: 0.644357 + vertex_buffer: -1.856432 + vertex_buffer: 2.585245 + vertex_buffer: 3.757904 + vertex_buffer: 0.419054 + vertex_buffer: 0.387155 + vertex_buffer: -0.923388 + vertex_buffer: 0.073076 + vertex_buffer: 6.671944 + vertex_buffer: 0.462783 + vertex_buffer: 0.505747 + vertex_buffer: -5.000589 + vertex_buffer: -6.135128 + vertex_buffer: 1.892523 + vertex_buffer: 0.238979 + vertex_buffer: 0.779745 + vertex_buffer: -5.085276 + vertex_buffer: -7.178590 + vertex_buffer: 0.714711 + vertex_buffer: 0.198221 + vertex_buffer: 0.831938 + vertex_buffer: -7.159291 + vertex_buffer: -0.811820 + vertex_buffer: -0.072044 + vertex_buffer: 0.107550 + vertex_buffer: 0.540755 + vertex_buffer: -5.843051 + vertex_buffer: -5.248023 + vertex_buffer: 0.924091 + vertex_buffer: 0.183610 + vertex_buffer: 0.740257 + vertex_buffer: -6.847258 + vertex_buffer: 3.662916 + vertex_buffer: 0.724695 + vertex_buffer: 0.134410 + vertex_buffer: 0.333683 + vertex_buffer: -2.412942 + vertex_buffer: -8.258853 + vertex_buffer: 4.119213 + vertex_buffer: 0.385764 + vertex_buffer: 0.883154 + vertex_buffer: -0.179909 + vertex_buffer: -1.689864 + vertex_buffer: 6.573301 + vertex_buffer: 0.490967 + vertex_buffer: 0.579378 + vertex_buffer: -2.103655 + vertex_buffer: -0.163946 + vertex_buffer: 4.566119 + vertex_buffer: 
0.382385 + vertex_buffer: 0.508573 + vertex_buffer: -6.407571 + vertex_buffer: 2.236021 + vertex_buffer: 1.560843 + vertex_buffer: 0.174399 + vertex_buffer: 0.397671 + vertex_buffer: -3.670075 + vertex_buffer: 2.360153 + vertex_buffer: 3.635230 + vertex_buffer: 0.318785 + vertex_buffer: 0.396235 + vertex_buffer: -3.177186 + vertex_buffer: 2.294265 + vertex_buffer: 3.775704 + vertex_buffer: 0.343364 + vertex_buffer: 0.400597 + vertex_buffer: -2.196121 + vertex_buffer: -4.598322 + vertex_buffer: 4.479786 + vertex_buffer: 0.396100 + vertex_buffer: 0.710217 + vertex_buffer: -6.234883 + vertex_buffer: -1.944430 + vertex_buffer: 1.663542 + vertex_buffer: 0.187885 + vertex_buffer: 0.588538 + vertex_buffer: -1.292924 + vertex_buffer: -9.295920 + vertex_buffer: 4.094063 + vertex_buffer: 0.430987 + vertex_buffer: 0.944065 + vertex_buffer: -3.210651 + vertex_buffer: -8.533278 + vertex_buffer: 2.802001 + vertex_buffer: 0.318993 + vertex_buffer: 0.898285 + vertex_buffer: -4.068926 + vertex_buffer: -7.993109 + vertex_buffer: 1.925119 + vertex_buffer: 0.266248 + vertex_buffer: 0.869701 + vertex_buffer: 0.000000 + vertex_buffer: 6.545390 + vertex_buffer: 5.027311 + vertex_buffer: 0.500023 + vertex_buffer: 0.190576 + vertex_buffer: 0.000000 + vertex_buffer: -9.403378 + vertex_buffer: 4.264492 + vertex_buffer: 0.499977 + vertex_buffer: 0.954453 + vertex_buffer: -2.724032 + vertex_buffer: 2.315802 + vertex_buffer: 3.777151 + vertex_buffer: 0.366170 + vertex_buffer: 0.398822 + vertex_buffer: -2.288460 + vertex_buffer: 2.398891 + vertex_buffer: 3.697603 + vertex_buffer: 0.393207 + vertex_buffer: 0.395537 + vertex_buffer: -1.998311 + vertex_buffer: 2.496547 + vertex_buffer: 3.689148 + vertex_buffer: 0.410373 + vertex_buffer: 0.391080 + vertex_buffer: -6.130040 + vertex_buffer: 3.399261 + vertex_buffer: 2.038516 + vertex_buffer: 0.194993 + vertex_buffer: 0.342102 + vertex_buffer: -2.288460 + vertex_buffer: 2.886504 + vertex_buffer: 3.775031 + vertex_buffer: 0.388665 + vertex_buffer: 0.362284 + vertex_buffer: -2.724032 + vertex_buffer: 2.961810 + vertex_buffer: 3.871767 + vertex_buffer: 0.365962 + vertex_buffer: 0.355971 + vertex_buffer: -3.177186 + vertex_buffer: 2.964136 + vertex_buffer: 3.876973 + vertex_buffer: 0.343364 + vertex_buffer: 0.355357 + vertex_buffer: -3.670075 + vertex_buffer: 2.927714 + vertex_buffer: 3.724325 + vertex_buffer: 0.318785 + vertex_buffer: 0.358340 + vertex_buffer: -4.018389 + vertex_buffer: 2.857357 + vertex_buffer: 3.482983 + vertex_buffer: 0.301415 + vertex_buffer: 0.363156 + vertex_buffer: -7.555811 + vertex_buffer: 4.106811 + vertex_buffer: -0.991917 + vertex_buffer: 0.058133 + vertex_buffer: 0.319076 + vertex_buffer: -4.018389 + vertex_buffer: 2.483695 + vertex_buffer: 3.440898 + vertex_buffer: 0.301415 + vertex_buffer: 0.387449 + vertex_buffer: 0.000000 + vertex_buffer: -2.521945 + vertex_buffer: 5.932265 + vertex_buffer: 0.499988 + vertex_buffer: 0.618434 + vertex_buffer: -1.776217 + vertex_buffer: -2.683946 + vertex_buffer: 5.213116 + vertex_buffer: 0.415838 + vertex_buffer: 0.624196 + vertex_buffer: -1.222237 + vertex_buffer: -1.182444 + vertex_buffer: 5.952465 + vertex_buffer: 0.445682 + vertex_buffer: 0.566077 + vertex_buffer: -0.731493 + vertex_buffer: -2.536683 + vertex_buffer: 5.815343 + vertex_buffer: 0.465844 + vertex_buffer: 0.620641 + vertex_buffer: 0.000000 + vertex_buffer: 3.271027 + vertex_buffer: 5.236015 + vertex_buffer: 0.499923 + vertex_buffer: 0.351524 + vertex_buffer: -4.135272 + vertex_buffer: -6.996638 + vertex_buffer: 2.671970 + vertex_buffer: 0.288719 
+ vertex_buffer: 0.819946 + vertex_buffer: -3.311811 + vertex_buffer: -7.660815 + vertex_buffer: 3.382963 + vertex_buffer: 0.335279 + vertex_buffer: 0.852820 + vertex_buffer: -1.313701 + vertex_buffer: -8.639995 + vertex_buffer: 4.702456 + vertex_buffer: 0.440512 + vertex_buffer: 0.902419 + vertex_buffer: -5.940524 + vertex_buffer: -6.223629 + vertex_buffer: -0.631468 + vertex_buffer: 0.128294 + vertex_buffer: 0.791941 + vertex_buffer: -1.998311 + vertex_buffer: 2.743838 + vertex_buffer: 3.744030 + vertex_buffer: 0.408772 + vertex_buffer: 0.373894 + vertex_buffer: -0.901447 + vertex_buffer: 1.236992 + vertex_buffer: 5.754256 + vertex_buffer: 0.455607 + vertex_buffer: 0.451801 + vertex_buffer: 0.000000 + vertex_buffer: -8.765243 + vertex_buffer: 4.891441 + vertex_buffer: 0.499877 + vertex_buffer: 0.908990 + vertex_buffer: -2.308977 + vertex_buffer: -8.974196 + vertex_buffer: 3.609070 + vertex_buffer: 0.375437 + vertex_buffer: 0.924192 + vertex_buffer: -6.954154 + vertex_buffer: -2.439843 + vertex_buffer: -0.131163 + vertex_buffer: 0.114210 + vertex_buffer: 0.615022 + vertex_buffer: -1.098819 + vertex_buffer: -4.458788 + vertex_buffer: 5.120727 + vertex_buffer: 0.448662 + vertex_buffer: 0.695278 + vertex_buffer: -1.181124 + vertex_buffer: -4.579996 + vertex_buffer: 5.189564 + vertex_buffer: 0.448020 + vertex_buffer: 0.704632 + vertex_buffer: -1.255818 + vertex_buffer: -4.787901 + vertex_buffer: 5.237051 + vertex_buffer: 0.447112 + vertex_buffer: 0.715808 + vertex_buffer: -1.325085 + vertex_buffer: -5.106507 + vertex_buffer: 5.205010 + vertex_buffer: 0.444832 + vertex_buffer: 0.730794 + vertex_buffer: -1.546388 + vertex_buffer: -5.819392 + vertex_buffer: 4.757893 + vertex_buffer: 0.430012 + vertex_buffer: 0.766809 + vertex_buffer: -1.953754 + vertex_buffer: -4.183892 + vertex_buffer: 4.431713 + vertex_buffer: 0.406787 + vertex_buffer: 0.685673 + vertex_buffer: -2.117802 + vertex_buffer: -4.137093 + vertex_buffer: 4.555096 + vertex_buffer: 0.400738 + vertex_buffer: 0.681069 + vertex_buffer: -2.285339 + vertex_buffer: -4.051196 + vertex_buffer: 4.582438 + vertex_buffer: 0.392400 + vertex_buffer: 0.677703 + vertex_buffer: -2.850160 + vertex_buffer: -3.665720 + vertex_buffer: 4.484994 + vertex_buffer: 0.367856 + vertex_buffer: 0.663919 + vertex_buffer: -5.278538 + vertex_buffer: -2.238942 + vertex_buffer: 2.861224 + vertex_buffer: 0.247923 + vertex_buffer: 0.601333 + vertex_buffer: -0.946709 + vertex_buffer: 1.907628 + vertex_buffer: 5.196779 + vertex_buffer: 0.452770 + vertex_buffer: 0.420850 + vertex_buffer: -1.314173 + vertex_buffer: 3.104912 + vertex_buffer: 4.231404 + vertex_buffer: 0.436392 + vertex_buffer: 0.359887 + vertex_buffer: -1.780000 + vertex_buffer: 2.860000 + vertex_buffer: 3.881555 + vertex_buffer: 0.416164 + vertex_buffer: 0.368714 + vertex_buffer: -1.845110 + vertex_buffer: -4.098880 + vertex_buffer: 4.247264 + vertex_buffer: 0.413386 + vertex_buffer: 0.692366 + vertex_buffer: -5.436187 + vertex_buffer: -4.030482 + vertex_buffer: 2.109852 + vertex_buffer: 0.228018 + vertex_buffer: 0.683572 + vertex_buffer: -0.766444 + vertex_buffer: 3.182131 + vertex_buffer: 4.861453 + vertex_buffer: 0.468268 + vertex_buffer: 0.352671 + vertex_buffer: -1.938616 + vertex_buffer: -6.614410 + vertex_buffer: 4.521085 + vertex_buffer: 0.411362 + vertex_buffer: 0.804327 + vertex_buffer: 0.000000 + vertex_buffer: 1.059413 + vertex_buffer: 6.774605 + vertex_buffer: 0.499989 + vertex_buffer: 0.469825 + vertex_buffer: -0.516573 + vertex_buffer: 1.583572 + vertex_buffer: 6.148363 + vertex_buffer: 
0.479154 + vertex_buffer: 0.442654 + vertex_buffer: 0.000000 + vertex_buffer: 1.728369 + vertex_buffer: 6.316750 + vertex_buffer: 0.499974 + vertex_buffer: 0.439637 + vertex_buffer: -1.246815 + vertex_buffer: 0.230297 + vertex_buffer: 5.681036 + vertex_buffer: 0.432112 + vertex_buffer: 0.493589 + vertex_buffer: 0.000000 + vertex_buffer: -7.942194 + vertex_buffer: 5.181173 + vertex_buffer: 0.499886 + vertex_buffer: 0.866917 + vertex_buffer: 0.000000 + vertex_buffer: -6.991499 + vertex_buffer: 5.153478 + vertex_buffer: 0.499913 + vertex_buffer: 0.821729 + vertex_buffer: -0.997827 + vertex_buffer: -6.930921 + vertex_buffer: 4.979576 + vertex_buffer: 0.456549 + vertex_buffer: 0.819201 + vertex_buffer: -3.288807 + vertex_buffer: -5.382514 + vertex_buffer: 3.795752 + vertex_buffer: 0.344549 + vertex_buffer: 0.745439 + vertex_buffer: -2.311631 + vertex_buffer: -1.566237 + vertex_buffer: 4.590085 + vertex_buffer: 0.378909 + vertex_buffer: 0.574010 + vertex_buffer: -2.680250 + vertex_buffer: -6.111567 + vertex_buffer: 4.096152 + vertex_buffer: 0.374293 + vertex_buffer: 0.780185 + vertex_buffer: -3.832928 + vertex_buffer: -1.537326 + vertex_buffer: 4.137731 + vertex_buffer: 0.319688 + vertex_buffer: 0.570738 + vertex_buffer: -2.961860 + vertex_buffer: -2.274215 + vertex_buffer: 4.440943 + vertex_buffer: 0.357155 + vertex_buffer: 0.604270 + vertex_buffer: -4.386901 + vertex_buffer: -2.683286 + vertex_buffer: 3.643886 + vertex_buffer: 0.295284 + vertex_buffer: 0.621581 + vertex_buffer: -1.217295 + vertex_buffer: -7.834465 + vertex_buffer: 4.969286 + vertex_buffer: 0.447750 + vertex_buffer: 0.862477 + vertex_buffer: -1.542374 + vertex_buffer: -0.136843 + vertex_buffer: 5.201008 + vertex_buffer: 0.410986 + vertex_buffer: 0.508723 + vertex_buffer: -3.878377 + vertex_buffer: -6.041764 + vertex_buffer: 3.311079 + vertex_buffer: 0.313951 + vertex_buffer: 0.775308 + vertex_buffer: -3.084037 + vertex_buffer: -6.809842 + vertex_buffer: 3.814195 + vertex_buffer: 0.354128 + vertex_buffer: 0.812553 + vertex_buffer: -3.747321 + vertex_buffer: -4.503545 + vertex_buffer: 3.726453 + vertex_buffer: 0.324548 + vertex_buffer: 0.703993 + vertex_buffer: -6.094129 + vertex_buffer: -3.205991 + vertex_buffer: 1.473482 + vertex_buffer: 0.189096 + vertex_buffer: 0.646300 + vertex_buffer: -4.588995 + vertex_buffer: -4.728726 + vertex_buffer: 2.983221 + vertex_buffer: 0.279777 + vertex_buffer: 0.714658 + vertex_buffer: -6.583231 + vertex_buffer: -3.941269 + vertex_buffer: 0.070268 + vertex_buffer: 0.133823 + vertex_buffer: 0.682701 + vertex_buffer: -3.492580 + vertex_buffer: -3.195820 + vertex_buffer: 4.130198 + vertex_buffer: 0.336768 + vertex_buffer: 0.644733 + vertex_buffer: -1.255543 + vertex_buffer: 0.802341 + vertex_buffer: 5.307551 + vertex_buffer: 0.429884 + vertex_buffer: 0.466522 + vertex_buffer: -1.126122 + vertex_buffer: -0.933602 + vertex_buffer: 6.538785 + vertex_buffer: 0.455528 + vertex_buffer: 0.548623 + vertex_buffer: -1.443109 + vertex_buffer: -1.142774 + vertex_buffer: 5.905127 + vertex_buffer: 0.437114 + vertex_buffer: 0.558896 + vertex_buffer: -0.923043 + vertex_buffer: -0.529042 + vertex_buffer: 7.003423 + vertex_buffer: 0.467288 + vertex_buffer: 0.529925 + vertex_buffer: -1.755386 + vertex_buffer: 3.529117 + vertex_buffer: 4.327696 + vertex_buffer: 0.414712 + vertex_buffer: 0.335220 + vertex_buffer: -2.632589 + vertex_buffer: 3.713828 + vertex_buffer: 4.364629 + vertex_buffer: 0.377046 + vertex_buffer: 0.322778 + vertex_buffer: -3.388062 + vertex_buffer: 3.721976 + vertex_buffer: 4.309028 + vertex_buffer: 
0.344108
+vertex_buffer: 0.320151
+vertex_buffer: -4.075766
+vertex_buffer: 3.675413
+vertex_buffer: 4.076063
+vertex_buffer: 0.312876
+vertex_buffer: 0.322332
+# [... vertex_buffer entries continue in this pattern, five floats per
+# vertex (x, y, z position followed by u, v texture coordinates) for all
+# 468 mesh vertices; the bulk of the generated data is omitted here ...]
+index_buffer: 173
+index_buffer: 155
+index_buffer: 133
+index_buffer: 246
+index_buffer: 33
+index_buffer: 7
+# [... index_buffer entries continue, three vertex indices per triangle,
+# each index in the range 0 to 467; the bulk of the generated data is
+# omitted here ...]
+index_buffer: 434
+index_buffer: 430
+index_buffer: 364
+index_buffer: 214
+index_buffer:
135 + index_buffer: 210 + index_buffer: 394 + index_buffer: 364 + index_buffer: 430 + index_buffer: 169 + index_buffer: 210 + index_buffer: 135 + index_buffer: 414 + index_buffer: 463 + index_buffer: 398 + index_buffer: 190 + index_buffer: 173 + index_buffer: 243 + index_buffer: 362 + index_buffer: 398 + index_buffer: 463 + index_buffer: 133 + index_buffer: 243 + index_buffer: 173 + index_buffer: 262 + index_buffer: 428 + index_buffer: 369 + index_buffer: 32 + index_buffer: 140 + index_buffer: 208 + index_buffer: 396 + index_buffer: 369 + index_buffer: 428 + index_buffer: 171 + index_buffer: 208 + index_buffer: 140 + index_buffer: 457 + index_buffer: 274 + index_buffer: 461 + index_buffer: 237 + index_buffer: 241 + index_buffer: 44 + index_buffer: 354 + index_buffer: 461 + index_buffer: 274 + index_buffer: 125 + index_buffer: 44 + index_buffer: 241 + index_buffer: 316 + index_buffer: 403 + index_buffer: 317 + index_buffer: 86 + index_buffer: 87 + index_buffer: 179 + index_buffer: 402 + index_buffer: 317 + index_buffer: 403 + index_buffer: 178 + index_buffer: 179 + index_buffer: 87 + index_buffer: 315 + index_buffer: 404 + index_buffer: 316 + index_buffer: 85 + index_buffer: 86 + index_buffer: 180 + index_buffer: 403 + index_buffer: 316 + index_buffer: 404 + index_buffer: 179 + index_buffer: 180 + index_buffer: 86 + index_buffer: 314 + index_buffer: 405 + index_buffer: 315 + index_buffer: 84 + index_buffer: 85 + index_buffer: 181 + index_buffer: 404 + index_buffer: 315 + index_buffer: 405 + index_buffer: 180 + index_buffer: 181 + index_buffer: 85 + index_buffer: 313 + index_buffer: 406 + index_buffer: 314 + index_buffer: 83 + index_buffer: 84 + index_buffer: 182 + index_buffer: 405 + index_buffer: 314 + index_buffer: 406 + index_buffer: 181 + index_buffer: 182 + index_buffer: 84 + index_buffer: 418 + index_buffer: 406 + index_buffer: 421 + index_buffer: 194 + index_buffer: 201 + index_buffer: 182 + index_buffer: 313 + index_buffer: 421 + index_buffer: 406 + index_buffer: 83 + index_buffer: 182 + index_buffer: 201 + index_buffer: 366 + index_buffer: 401 + index_buffer: 323 + index_buffer: 137 + index_buffer: 93 + index_buffer: 177 + index_buffer: 361 + index_buffer: 323 + index_buffer: 401 + index_buffer: 132 + index_buffer: 177 + index_buffer: 93 + index_buffer: 408 + index_buffer: 407 + index_buffer: 306 + index_buffer: 184 + index_buffer: 76 + index_buffer: 183 + index_buffer: 292 + index_buffer: 306 + index_buffer: 407 + index_buffer: 62 + index_buffer: 183 + index_buffer: 76 + index_buffer: 408 + index_buffer: 306 + index_buffer: 409 + index_buffer: 184 + index_buffer: 185 + index_buffer: 76 + index_buffer: 291 + index_buffer: 409 + index_buffer: 306 + index_buffer: 61 + index_buffer: 76 + index_buffer: 185 + index_buffer: 410 + index_buffer: 409 + index_buffer: 287 + index_buffer: 186 + index_buffer: 57 + index_buffer: 185 + index_buffer: 291 + index_buffer: 287 + index_buffer: 409 + index_buffer: 61 + index_buffer: 185 + index_buffer: 57 + index_buffer: 436 + index_buffer: 410 + index_buffer: 432 + index_buffer: 216 + index_buffer: 212 + index_buffer: 186 + index_buffer: 287 + index_buffer: 432 + index_buffer: 410 + index_buffer: 57 + index_buffer: 186 + index_buffer: 212 + index_buffer: 434 + index_buffer: 416 + index_buffer: 427 + index_buffer: 214 + index_buffer: 207 + index_buffer: 192 + index_buffer: 411 + index_buffer: 427 + index_buffer: 416 + index_buffer: 187 + index_buffer: 192 + index_buffer: 207 + index_buffer: 264 + index_buffer: 368 + index_buffer: 372 + index_buffer: 34 
+ index_buffer: 143 + index_buffer: 139 + index_buffer: 383 + index_buffer: 372 + index_buffer: 368 + index_buffer: 156 + index_buffer: 139 + index_buffer: 143 + index_buffer: 457 + index_buffer: 459 + index_buffer: 438 + index_buffer: 237 + index_buffer: 218 + index_buffer: 239 + index_buffer: 309 + index_buffer: 438 + index_buffer: 459 + index_buffer: 79 + index_buffer: 239 + index_buffer: 218 + index_buffer: 352 + index_buffer: 376 + index_buffer: 366 + index_buffer: 123 + index_buffer: 137 + index_buffer: 147 + index_buffer: 401 + index_buffer: 366 + index_buffer: 376 + index_buffer: 177 + index_buffer: 147 + index_buffer: 137 + index_buffer: 4 + index_buffer: 1 + index_buffer: 275 + index_buffer: 4 + index_buffer: 45 + index_buffer: 1 + index_buffer: 274 + index_buffer: 275 + index_buffer: 1 + index_buffer: 44 + index_buffer: 1 + index_buffer: 45 + index_buffer: 428 + index_buffer: 262 + index_buffer: 421 + index_buffer: 208 + index_buffer: 201 + index_buffer: 32 + index_buffer: 418 + index_buffer: 421 + index_buffer: 262 + index_buffer: 194 + index_buffer: 32 + index_buffer: 201 + index_buffer: 327 + index_buffer: 358 + index_buffer: 294 + index_buffer: 98 + index_buffer: 64 + index_buffer: 129 + index_buffer: 331 + index_buffer: 294 + index_buffer: 358 + index_buffer: 102 + index_buffer: 129 + index_buffer: 64 + index_buffer: 367 + index_buffer: 435 + index_buffer: 416 + index_buffer: 138 + index_buffer: 192 + index_buffer: 215 + index_buffer: 433 + index_buffer: 416 + index_buffer: 435 + index_buffer: 213 + index_buffer: 215 + index_buffer: 192 + index_buffer: 455 + index_buffer: 439 + index_buffer: 289 + index_buffer: 235 + index_buffer: 59 + index_buffer: 219 + index_buffer: 392 + index_buffer: 289 + index_buffer: 439 + index_buffer: 166 + index_buffer: 219 + index_buffer: 59 + index_buffer: 328 + index_buffer: 462 + index_buffer: 326 + index_buffer: 99 + index_buffer: 97 + index_buffer: 242 + index_buffer: 370 + index_buffer: 326 + index_buffer: 462 + index_buffer: 141 + index_buffer: 242 + index_buffer: 97 + index_buffer: 326 + index_buffer: 370 + index_buffer: 2 + index_buffer: 97 + index_buffer: 2 + index_buffer: 141 + index_buffer: 94 + index_buffer: 2 + index_buffer: 370 + index_buffer: 94 + index_buffer: 141 + index_buffer: 2 + index_buffer: 460 + index_buffer: 455 + index_buffer: 305 + index_buffer: 240 + index_buffer: 75 + index_buffer: 235 + index_buffer: 289 + index_buffer: 305 + index_buffer: 455 + index_buffer: 59 + index_buffer: 235 + index_buffer: 75 + index_buffer: 448 + index_buffer: 339 + index_buffer: 449 + index_buffer: 228 + index_buffer: 229 + index_buffer: 110 + index_buffer: 254 + index_buffer: 449 + index_buffer: 339 + index_buffer: 24 + index_buffer: 110 + index_buffer: 229 + index_buffer: 261 + index_buffer: 446 + index_buffer: 255 + index_buffer: 31 + index_buffer: 25 + index_buffer: 226 + index_buffer: 359 + index_buffer: 255 + index_buffer: 446 + index_buffer: 130 + index_buffer: 226 + index_buffer: 25 + index_buffer: 449 + index_buffer: 254 + index_buffer: 450 + index_buffer: 229 + index_buffer: 230 + index_buffer: 24 + index_buffer: 253 + index_buffer: 450 + index_buffer: 254 + index_buffer: 23 + index_buffer: 24 + index_buffer: 230 + index_buffer: 450 + index_buffer: 253 + index_buffer: 451 + index_buffer: 230 + index_buffer: 231 + index_buffer: 23 + index_buffer: 252 + index_buffer: 451 + index_buffer: 253 + index_buffer: 22 + index_buffer: 23 + index_buffer: 231 + index_buffer: 451 + index_buffer: 252 + index_buffer: 452 + index_buffer: 231 + 
index_buffer: 232 + index_buffer: 22 + index_buffer: 256 + index_buffer: 452 + index_buffer: 252 + index_buffer: 26 + index_buffer: 22 + index_buffer: 232 + index_buffer: 256 + index_buffer: 341 + index_buffer: 452 + index_buffer: 26 + index_buffer: 232 + index_buffer: 112 + index_buffer: 453 + index_buffer: 452 + index_buffer: 341 + index_buffer: 233 + index_buffer: 112 + index_buffer: 232 + index_buffer: 413 + index_buffer: 464 + index_buffer: 414 + index_buffer: 189 + index_buffer: 190 + index_buffer: 244 + index_buffer: 463 + index_buffer: 414 + index_buffer: 464 + index_buffer: 243 + index_buffer: 244 + index_buffer: 190 + index_buffer: 441 + index_buffer: 413 + index_buffer: 286 + index_buffer: 221 + index_buffer: 56 + index_buffer: 189 + index_buffer: 414 + index_buffer: 286 + index_buffer: 413 + index_buffer: 190 + index_buffer: 189 + index_buffer: 56 + index_buffer: 441 + index_buffer: 286 + index_buffer: 442 + index_buffer: 221 + index_buffer: 222 + index_buffer: 56 + index_buffer: 258 + index_buffer: 442 + index_buffer: 286 + index_buffer: 28 + index_buffer: 56 + index_buffer: 222 + index_buffer: 442 + index_buffer: 258 + index_buffer: 443 + index_buffer: 222 + index_buffer: 223 + index_buffer: 28 + index_buffer: 257 + index_buffer: 443 + index_buffer: 258 + index_buffer: 27 + index_buffer: 28 + index_buffer: 223 + index_buffer: 444 + index_buffer: 443 + index_buffer: 259 + index_buffer: 224 + index_buffer: 29 + index_buffer: 223 + index_buffer: 257 + index_buffer: 259 + index_buffer: 443 + index_buffer: 27 + index_buffer: 223 + index_buffer: 29 + index_buffer: 259 + index_buffer: 260 + index_buffer: 444 + index_buffer: 29 + index_buffer: 224 + index_buffer: 30 + index_buffer: 445 + index_buffer: 444 + index_buffer: 260 + index_buffer: 225 + index_buffer: 30 + index_buffer: 224 + index_buffer: 260 + index_buffer: 467 + index_buffer: 445 + index_buffer: 30 + index_buffer: 225 + index_buffer: 247 + index_buffer: 342 + index_buffer: 445 + index_buffer: 467 + index_buffer: 113 + index_buffer: 247 + index_buffer: 225 + index_buffer: 250 + index_buffer: 309 + index_buffer: 458 + index_buffer: 20 + index_buffer: 238 + index_buffer: 79 + index_buffer: 459 + index_buffer: 458 + index_buffer: 309 + index_buffer: 239 + index_buffer: 79 + index_buffer: 238 + index_buffer: 290 + index_buffer: 305 + index_buffer: 392 + index_buffer: 60 + index_buffer: 166 + index_buffer: 75 + index_buffer: 289 + index_buffer: 392 + index_buffer: 305 + index_buffer: 59 + index_buffer: 75 + index_buffer: 166 + index_buffer: 460 + index_buffer: 305 + index_buffer: 328 + index_buffer: 240 + index_buffer: 99 + index_buffer: 75 + index_buffer: 290 + index_buffer: 328 + index_buffer: 305 + index_buffer: 60 + index_buffer: 75 + index_buffer: 99 + index_buffer: 376 + index_buffer: 433 + index_buffer: 401 + index_buffer: 147 + index_buffer: 177 + index_buffer: 213 + index_buffer: 435 + index_buffer: 401 + index_buffer: 433 + index_buffer: 215 + index_buffer: 213 + index_buffer: 177 + index_buffer: 250 + index_buffer: 290 + index_buffer: 309 + index_buffer: 20 + index_buffer: 79 + index_buffer: 60 + index_buffer: 392 + index_buffer: 309 + index_buffer: 290 + index_buffer: 166 + index_buffer: 60 + index_buffer: 79 + index_buffer: 411 + index_buffer: 416 + index_buffer: 376 + index_buffer: 187 + index_buffer: 147 + index_buffer: 192 + index_buffer: 433 + index_buffer: 376 + index_buffer: 416 + index_buffer: 213 + index_buffer: 192 + index_buffer: 147 + index_buffer: 341 + index_buffer: 463 + index_buffer: 453 + 
index_buffer: 112 + index_buffer: 233 + index_buffer: 243 + index_buffer: 464 + index_buffer: 453 + index_buffer: 463 + index_buffer: 244 + index_buffer: 243 + index_buffer: 233 + index_buffer: 453 + index_buffer: 464 + index_buffer: 357 + index_buffer: 233 + index_buffer: 128 + index_buffer: 244 + index_buffer: 465 + index_buffer: 357 + index_buffer: 464 + index_buffer: 245 + index_buffer: 244 + index_buffer: 128 + index_buffer: 412 + index_buffer: 343 + index_buffer: 465 + index_buffer: 188 + index_buffer: 245 + index_buffer: 114 + index_buffer: 357 + index_buffer: 465 + index_buffer: 343 + index_buffer: 128 + index_buffer: 114 + index_buffer: 245 + index_buffer: 437 + index_buffer: 343 + index_buffer: 399 + index_buffer: 217 + index_buffer: 174 + index_buffer: 114 + index_buffer: 412 + index_buffer: 399 + index_buffer: 343 + index_buffer: 188 + index_buffer: 114 + index_buffer: 174 + index_buffer: 363 + index_buffer: 440 + index_buffer: 360 + index_buffer: 134 + index_buffer: 131 + index_buffer: 220 + index_buffer: 344 + index_buffer: 360 + index_buffer: 440 + index_buffer: 115 + index_buffer: 220 + index_buffer: 131 + index_buffer: 456 + index_buffer: 420 + index_buffer: 399 + index_buffer: 236 + index_buffer: 174 + index_buffer: 198 + index_buffer: 437 + index_buffer: 399 + index_buffer: 420 + index_buffer: 217 + index_buffer: 198 + index_buffer: 174 + index_buffer: 456 + index_buffer: 363 + index_buffer: 420 + index_buffer: 236 + index_buffer: 198 + index_buffer: 134 + index_buffer: 360 + index_buffer: 420 + index_buffer: 363 + index_buffer: 131 + index_buffer: 134 + index_buffer: 198 + index_buffer: 361 + index_buffer: 401 + index_buffer: 288 + index_buffer: 132 + index_buffer: 58 + index_buffer: 177 + index_buffer: 435 + index_buffer: 288 + index_buffer: 401 + index_buffer: 215 + index_buffer: 177 + index_buffer: 58 + index_buffer: 353 + index_buffer: 265 + index_buffer: 383 + index_buffer: 124 + index_buffer: 156 + index_buffer: 35 + index_buffer: 372 + index_buffer: 383 + index_buffer: 265 + index_buffer: 143 + index_buffer: 35 + index_buffer: 156 + index_buffer: 255 + index_buffer: 249 + index_buffer: 339 + index_buffer: 25 + index_buffer: 110 + index_buffer: 7 + index_buffer: 390 + index_buffer: 339 + index_buffer: 249 + index_buffer: 163 + index_buffer: 7 + index_buffer: 110 + index_buffer: 261 + index_buffer: 255 + index_buffer: 448 + index_buffer: 31 + index_buffer: 228 + index_buffer: 25 + index_buffer: 339 + index_buffer: 448 + index_buffer: 255 + index_buffer: 110 + index_buffer: 25 + index_buffer: 228 + index_buffer: 14 + index_buffer: 317 + index_buffer: 13 + index_buffer: 14 + index_buffer: 13 + index_buffer: 87 + index_buffer: 312 + index_buffer: 13 + index_buffer: 317 + index_buffer: 82 + index_buffer: 87 + index_buffer: 13 + index_buffer: 317 + index_buffer: 402 + index_buffer: 312 + index_buffer: 87 + index_buffer: 82 + index_buffer: 178 + index_buffer: 311 + index_buffer: 312 + index_buffer: 402 + index_buffer: 81 + index_buffer: 178 + index_buffer: 82 + index_buffer: 402 + index_buffer: 318 + index_buffer: 311 + index_buffer: 178 + index_buffer: 81 + index_buffer: 88 + index_buffer: 310 + index_buffer: 311 + index_buffer: 318 + index_buffer: 80 + index_buffer: 88 + index_buffer: 81 + index_buffer: 318 + index_buffer: 324 + index_buffer: 310 + index_buffer: 88 + index_buffer: 80 + index_buffer: 95 + index_buffer: 415 + index_buffer: 310 + index_buffer: 324 + index_buffer: 191 + index_buffer: 95 + index_buffer: 80 +} diff --git 
a/mediapipe/modules/face_geometry/effect_renderer_calculator.cc b/mediapipe/modules/face_geometry/effect_renderer_calculator.cc
new file mode 100644
index 0000000..f353b8f
--- /dev/null
+++ b/mediapipe/modules/face_geometry/effect_renderer_calculator.cc
@@ -0,0 +1,284 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/image_frame.h"
+#include "mediapipe/framework/formats/image_frame_opencv.h"
+#include "mediapipe/framework/port/opencv_core_inc.h"       // NOTYPO
+#include "mediapipe/framework/port/opencv_imgcodecs_inc.h"  // NOTYPO
+#include "mediapipe/framework/port/opencv_imgproc_inc.h"    // NOTYPO
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/framework/port/statusor.h"
+#include "mediapipe/gpu/gl_calculator_helper.h"
+#include "mediapipe/gpu/gpu_buffer.h"
+#include "mediapipe/modules/face_geometry/effect_renderer_calculator.pb.h"
+#include "mediapipe/modules/face_geometry/libs/effect_renderer.h"
+#include "mediapipe/modules/face_geometry/libs/validation_utils.h"
+#include "mediapipe/modules/face_geometry/protos/environment.pb.h"
+#include "mediapipe/modules/face_geometry/protos/face_geometry.pb.h"
+#include "mediapipe/modules/face_geometry/protos/mesh_3d.pb.h"
+#include "mediapipe/util/resource_util.h"
+
+namespace mediapipe {
+namespace {
+
+static constexpr char kEnvironmentTag[] = "ENVIRONMENT";
+static constexpr char kImageGpuTag[] = "IMAGE_GPU";
+static constexpr char kMultiFaceGeometryTag[] = "MULTI_FACE_GEOMETRY";
+
+// A calculator that renders a visual effect for multiple faces.
+//
+// Inputs:
+//   IMAGE_GPU (`GpuBuffer`, required):
+//     A buffer containing input image.
+//
+//   MULTI_FACE_GEOMETRY (`std::vector<face_geometry::FaceGeometry>`, optional):
+//     A vector of face geometry data.
+//
+//     If absent, the input GPU buffer is copied over into the output GPU buffer
+//     without any effect being rendered.
+//
+// Input side packets:
+//   ENVIRONMENT (`face_geometry::Environment`, required)
+//     Describes an environment; includes the camera frame origin point location
+//     as well as virtual camera parameters.
+//
+// Output:
+//   IMAGE_GPU (`GpuBuffer`, required):
+//     A buffer with a visual effect being rendered for multiple faces.
+//
+// Options:
+//   effect_texture_path (`string`, required):
+//     Defines a path for the visual effect texture file. The effect texture is
+//     later rendered on top of the effect mesh.
+//
+//     The texture file format must be supported by the OpenCV image decoder. It
+//     must also define either an RGB or an RGBA texture.
+//
+//   effect_mesh_3d_path (`string`, optional):
+//     Defines a path for the visual effect mesh 3D file. The effect mesh is
+//     later "attached" to the face and is driven by the face pose
+//     transformation matrix.
+//
+//     The mesh 3D file format must be the binary `face_geometry.Mesh3d` proto.
+//
+//     If it is not present, the runtime face mesh will be used as the effect
+//     mesh - this mode is handy for facepaint effects.
+//
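// Example usage, as a graph node config (a sketch: the stream, side-packet
// and texture path values below are illustrative, not taken from this patch):
//
//   node {
//     calculator: "FaceGeometryEffectRendererCalculator"
//     input_side_packet: "ENVIRONMENT:environment"
//     input_stream: "IMAGE_GPU:input_video"
//     input_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
//     output_stream: "IMAGE_GPU:output_video"
//     options: {
//       [mediapipe.FaceGeometryEffectRendererCalculatorOptions.ext] {
//         effect_texture_path: "path/to/effect_texture.png"
//       }
//     }
//   }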
+class EffectRendererCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc) {
+    MP_RETURN_IF_ERROR(mediapipe::GlCalculatorHelper::UpdateContract(cc))
+        << "Failed to update contract for the GPU helper!";
+
+    cc->InputSidePackets()
+        .Tag(kEnvironmentTag)
+        .Set<face_geometry::Environment>();
+    cc->Inputs().Tag(kImageGpuTag).Set<mediapipe::GpuBuffer>();
+    cc->Inputs()
+        .Tag(kMultiFaceGeometryTag)
+        .Set<std::vector<face_geometry::FaceGeometry>>();
+    cc->Outputs().Tag(kImageGpuTag).Set<mediapipe::GpuBuffer>();
+
+    return absl::OkStatus();
+  }
+
+  absl::Status Open(CalculatorContext* cc) override {
+    cc->SetOffset(mediapipe::TimestampDiff(0));
+
+    MP_RETURN_IF_ERROR(gpu_helper_.Open(cc))
+        << "Failed to open the GPU helper!";
+    return gpu_helper_.RunInGlContext([&]() -> absl::Status {
+      const auto& options =
+          cc->Options<FaceGeometryEffectRendererCalculatorOptions>();
+
+      const auto& environment = cc->InputSidePackets()
+                                    .Tag(kEnvironmentTag)
+                                    .Get<face_geometry::Environment>();
+
+      MP_RETURN_IF_ERROR(face_geometry::ValidateEnvironment(environment))
+          << "Invalid environment!";
+
+      absl::optional<face_geometry::Mesh3d> effect_mesh_3d;
+      if (options.has_effect_mesh_3d_path()) {
+        ASSIGN_OR_RETURN(effect_mesh_3d,
+                         ReadMesh3dFromFile(options.effect_mesh_3d_path()),
+                         _ << "Failed to read the effect 3D mesh from file!");
+
+        MP_RETURN_IF_ERROR(face_geometry::ValidateMesh3d(*effect_mesh_3d))
+            << "Invalid effect 3D mesh!";
+      }
+
+      ASSIGN_OR_RETURN(ImageFrame effect_texture,
+                       ReadTextureFromFile(options.effect_texture_path()),
+                       _ << "Failed to read the effect texture from file!");
+
+      ASSIGN_OR_RETURN(effect_renderer_,
+                       CreateEffectRenderer(environment, effect_mesh_3d,
+                                            std::move(effect_texture)),
+                       _ << "Failed to create the effect renderer!");
+
+      return absl::OkStatus();
+    });
+  }
+
+  absl::Status Process(CalculatorContext* cc) override {
+    // The `IMAGE_GPU` stream is required to have a non-empty packet. In case
+    // this requirement is not met, there's nothing to be processed at the
+    // current timestamp.
+    if (cc->Inputs().Tag(kImageGpuTag).IsEmpty()) {
+      return absl::OkStatus();
+    }
+
+    return gpu_helper_.RunInGlContext([this, cc]() -> absl::Status {
+      const auto& input_gpu_buffer =
+          cc->Inputs().Tag(kImageGpuTag).Get<mediapipe::GpuBuffer>();
+
+      GlTexture input_gl_texture =
+          gpu_helper_.CreateSourceTexture(input_gpu_buffer);
+
+      GlTexture output_gl_texture = gpu_helper_.CreateDestinationTexture(
+          input_gl_texture.width(), input_gl_texture.height());
+
+      std::vector<face_geometry::FaceGeometry> empty_multi_face_geometry;
+      const auto& multi_face_geometry =
+          cc->Inputs().Tag(kMultiFaceGeometryTag).IsEmpty()
+              ? empty_multi_face_geometry
+              : cc->Inputs()
+                    .Tag(kMultiFaceGeometryTag)
+                    .Get<std::vector<face_geometry::FaceGeometry>>();
+
+      // Validate input multi face geometry data.
+      for (const face_geometry::FaceGeometry& face_geometry :
+           multi_face_geometry) {
+        MP_RETURN_IF_ERROR(face_geometry::ValidateFaceGeometry(face_geometry))
+            << "Invalid face geometry!";
+      }
+
+      MP_RETURN_IF_ERROR(effect_renderer_->RenderEffect(
+          multi_face_geometry, input_gl_texture.width(),
+          input_gl_texture.height(), input_gl_texture.target(),
+          input_gl_texture.name(), output_gl_texture.target(),
+          output_gl_texture.name()))
+          << "Failed to render the effect!";
+
+      std::unique_ptr<mediapipe::GpuBuffer> output_gpu_buffer =
+          output_gl_texture.GetFrame<mediapipe::GpuBuffer>();
+
+      cc->Outputs()
+          .Tag(kImageGpuTag)
+          .AddPacket(mediapipe::Adopt(output_gpu_buffer.release())
+                         .At(cc->InputTimestamp()));
+
+      output_gl_texture.Release();
+      input_gl_texture.Release();
+
+      return absl::OkStatus();
+    });
+  }
+
+  ~EffectRendererCalculator() {
+    gpu_helper_.RunInGlContext([this]() { effect_renderer_.reset(); });
+  }
+
+ private:
+  static absl::StatusOr<ImageFrame> ReadTextureFromFile(
+      const std::string& texture_path) {
+    ASSIGN_OR_RETURN(std::string texture_blob,
+                     ReadContentBlobFromFile(texture_path),
+                     _ << "Failed to read texture blob from file!");
+
+    // Use OpenCV image decoding functionality to finish reading the texture.
+    std::vector<char> texture_blob_vector(texture_blob.begin(),
+                                          texture_blob.end());
+    cv::Mat decoded_mat =
+        cv::imdecode(texture_blob_vector, cv::IMREAD_UNCHANGED);
+
+    RET_CHECK(decoded_mat.type() == CV_8UC3 || decoded_mat.type() == CV_8UC4)
+        << "Texture must have `char` as the underlying type and "
+           "must have either 3 or 4 channels!";
+
+    ImageFormat::Format image_format = ImageFormat::UNKNOWN;
+    cv::Mat output_mat;
+    switch (decoded_mat.channels()) {
+      case 3:
+        image_format = ImageFormat::SRGB;
+        cv::cvtColor(decoded_mat, output_mat, cv::COLOR_BGR2RGB);
+        break;
+
+      case 4:
+        image_format = ImageFormat::SRGBA;
+        cv::cvtColor(decoded_mat, output_mat, cv::COLOR_BGRA2RGBA);
+        break;
+
+      default:
+        RET_CHECK_FAIL()
+            << "Unexpected number of channels; expected 3 or 4, got "
+            << decoded_mat.channels() << "!";
+    }
+
+    ImageFrame output_image_frame(image_format, output_mat.size().width,
+                                  output_mat.size().height,
+                                  ImageFrame::kGlDefaultAlignmentBoundary);
+
+    output_mat.copyTo(formats::MatView(&output_image_frame));
+
+    return output_image_frame;
+  }
+
+  static absl::StatusOr<face_geometry::Mesh3d> ReadMesh3dFromFile(
+      const std::string& mesh_3d_path) {
+    ASSIGN_OR_RETURN(std::string mesh_3d_blob,
+                     ReadContentBlobFromFile(mesh_3d_path),
+                     _ << "Failed to read mesh 3D blob from file!");
+
+    face_geometry::Mesh3d mesh_3d;
+    RET_CHECK(mesh_3d.ParseFromString(mesh_3d_blob))
+        << "Failed to parse a mesh 3D proto from a binary blob!";
+
+    return mesh_3d;
+  }
+
+  static absl::StatusOr<std::string> ReadContentBlobFromFile(
+      const std::string& unresolved_path) {
+    ASSIGN_OR_RETURN(std::string resolved_path,
+                     mediapipe::PathToResourceAsFile(unresolved_path),
+                     _ << "Failed to resolve path! Path = " << unresolved_path);
+
+    std::string content_blob;
+    MP_RETURN_IF_ERROR(
+        mediapipe::GetResourceContents(resolved_path, &content_blob))
+        << "Failed to read content blob! Resolved path = " << resolved_path;
+
+    return content_blob;
+  }
+
+  mediapipe::GlCalculatorHelper gpu_helper_;
+  std::unique_ptr<face_geometry::EffectRenderer> effect_renderer_;
+};
+
+}  // namespace
+
+using FaceGeometryEffectRendererCalculator = EffectRendererCalculator;
+
+REGISTER_CALCULATOR(FaceGeometryEffectRendererCalculator);
+
+}  // namespace mediapipe
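The calculator above leans on MediaPipe's status macros for error propagation. A minimal sketch of that pattern, assuming a helper `ParseScale` that is not part of this patch (the macros come from mediapipe/framework/port/status_macros.h and ret_check.h):

    #include <string>

    #include "mediapipe/framework/port/ret_check.h"
    #include "mediapipe/framework/port/status.h"
    #include "mediapipe/framework/port/status_macros.h"
    #include "mediapipe/framework/port/statusor.h"

    absl::StatusOr<float> ParseScale(const std::string& text);  // assumed helper

    absl::Status ApplyScale(const std::string& text) {
      // ASSIGN_OR_RETURN unwraps the StatusOr on success; on failure it
      // returns early, and `_ << ...` appends readable context to the status.
      ASSIGN_OR_RETURN(float scale, ParseScale(text),
                       _ << "Failed to parse scale from: " << text);
      RET_CHECK_GT(scale, 0.f) << "Scale must be positive!";
      return absl::OkStatus();
    }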
Resolved path = " << resolved_path; + + return content_blob; + } + + mediapipe::GlCalculatorHelper gpu_helper_; + std::unique_ptr effect_renderer_; +}; + +} // namespace + +using FaceGeometryEffectRendererCalculator = EffectRendererCalculator; + +REGISTER_CALCULATOR(FaceGeometryEffectRendererCalculator); + +} // namespace mediapipe diff --git a/mediapipe/modules/face_geometry/effect_renderer_calculator.proto b/mediapipe/modules/face_geometry/effect_renderer_calculator.proto new file mode 100644 index 0000000..6c23903 --- /dev/null +++ b/mediapipe/modules/face_geometry/effect_renderer_calculator.proto @@ -0,0 +1,46 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +import "mediapipe/framework/calculator_options.proto"; + +message FaceGeometryEffectRendererCalculatorOptions { + extend CalculatorOptions { + optional FaceGeometryEffectRendererCalculatorOptions ext = 323693808; + } + + // Defines a path for the visual effect texture file. The effect texture is + // later rendered on top of the effect mesh. + // + // Please be aware about the difference between the CPU texture memory layout + // and the GPU texture sampler coordinate space. This renderer follows + // conventions discussed here: https://open.gl/textures + // + // The texture file format must be supported by the OpenCV image decoder. It + // must also define either an RGB or an RGBA texture. + optional string effect_texture_path = 1; + + // Defines a path for the visual effect mesh 3D file. The effect mesh is later + // "attached" to the face and is driven by the face pose transformation + // matrix. + // + // The mesh 3D file format must be the binary `face_system.Mesh3d` proto. + // + // If is not present, the runtime face mesh will be used as the effect mesh + // - this mode is handy for facepaint effects. + optional string effect_mesh_3d_path = 2; +} diff --git a/mediapipe/modules/face_geometry/env_generator_calculator.cc b/mediapipe/modules/face_geometry/env_generator_calculator.cc new file mode 100644 index 0000000..2e95a66 --- /dev/null +++ b/mediapipe/modules/face_geometry/env_generator_calculator.cc @@ -0,0 +1,81 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/port/status.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/modules/face_geometry/env_generator_calculator.pb.h" +#include "mediapipe/modules/face_geometry/libs/validation_utils.h" +#include "mediapipe/modules/face_geometry/protos/environment.pb.h" + +namespace mediapipe { +namespace { + +static constexpr char kEnvironmentTag[] = "ENVIRONMENT"; + +// A calculator that generates an environment, which describes a virtual scene. +// +// Output side packets: +// ENVIRONMENT (`face_geometry::Environment`, required) +// Describes an environment; includes the camera frame origin point location +// as well as virtual camera parameters. +// +// Options: +// environment (`face_geometry.Environment`, required): +// Defines an environment to be packed as the output side packet. +// +// Must be valid (for details, please refer to the proto message definition +// comments and/or `modules/face_geometry/libs/validation_utils.h/cc`) +// +class EnvGeneratorCalculator : public CalculatorBase { + public: + static absl::Status GetContract(CalculatorContract* cc) { + cc->OutputSidePackets() + .Tag(kEnvironmentTag) + .Set(); + return absl::OkStatus(); + } + + absl::Status Open(CalculatorContext* cc) override { + cc->SetOffset(mediapipe::TimestampDiff(0)); + + const face_geometry::Environment& environment = + cc->Options().environment(); + + MP_RETURN_IF_ERROR(face_geometry::ValidateEnvironment(environment)) + << "Invalid environment!"; + + cc->OutputSidePackets() + .Tag(kEnvironmentTag) + .Set(mediapipe::MakePacket(environment)); + + return absl::OkStatus(); + } + + absl::Status Process(CalculatorContext* cc) override { + return absl::OkStatus(); + } + + absl::Status Close(CalculatorContext* cc) override { + return absl::OkStatus(); + } +}; + +} // namespace + +using FaceGeometryEnvGeneratorCalculator = EnvGeneratorCalculator; + +REGISTER_CALCULATOR(FaceGeometryEnvGeneratorCalculator); + +} // namespace mediapipe diff --git a/mediapipe/modules/face_geometry/env_generator_calculator.proto b/mediapipe/modules/face_geometry/env_generator_calculator.proto new file mode 100644 index 0000000..dea2ae0 --- /dev/null +++ b/mediapipe/modules/face_geometry/env_generator_calculator.proto @@ -0,0 +1,32 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +import "mediapipe/framework/calculator_options.proto"; +import "mediapipe/modules/face_geometry/protos/environment.proto"; + +message FaceGeometryEnvGeneratorCalculatorOptions { + extend CalculatorOptions { + optional FaceGeometryEnvGeneratorCalculatorOptions ext = 323693810; + } + + // Defines an environment to be packed as the output side packet. 
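For context, a sketch of constructing the value this calculator packs as a side packet; the Environment field and enum names below are assumptions based on face_geometry/protos/environment.proto, and the numbers are illustrative:

    face_geometry::Environment environment;
    // Screen-space origin convention; TOP_LEFT_CORNER is assumed here.
    environment.set_origin_point_location(
        face_geometry::OriginPointLocation::TOP_LEFT_CORNER);
    // Virtual camera parameters: vertical FOV in degrees, near/far planes.
    auto* camera = environment.mutable_perspective_camera();
    camera->set_vertical_fov_degrees(63.f);
    camera->set_near(1.f);
    camera->set_far(10000.f);
    mediapipe::Packet packet =
        mediapipe::MakePacket<face_geometry::Environment>(environment);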
diff --git a/mediapipe/modules/face_geometry/env_generator_calculator.proto b/mediapipe/modules/face_geometry/env_generator_calculator.proto
new file mode 100644
index 0000000..dea2ae0
--- /dev/null
+++ b/mediapipe/modules/face_geometry/env_generator_calculator.proto
@@ -0,0 +1,32 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator_options.proto";
+import "mediapipe/modules/face_geometry/protos/environment.proto";
+
+message FaceGeometryEnvGeneratorCalculatorOptions {
+  extend CalculatorOptions {
+    optional FaceGeometryEnvGeneratorCalculatorOptions ext = 323693810;
+  }
+
+  // Defines an environment to be packed as the output side packet.
+  //
+  // Must be valid (for details, please refer to the proto message definition
+  // comments and/or `modules/face_geometry/libs/validation_utils.h/cc`)
+  optional face_geometry.Environment environment = 1;
+}
diff --git a/mediapipe/modules/face_geometry/face_geometry.pbtxt b/mediapipe/modules/face_geometry/face_geometry.pbtxt
new file mode 100644
index 0000000..76228d4
--- /dev/null
+++ b/mediapipe/modules/face_geometry/face_geometry.pbtxt
@@ -0,0 +1,48 @@
+# MediaPipe graph to extract geometry from face landmarks for multiple faces.
+#
+# It is required that "geometry_pipeline_metadata.binarypb" is available at
+# "mediapipe/modules/face_geometry/data/geometry_pipeline_metadata.binarypb"
+# path during execution.
+#
+# This is a deprecated subgraph kept for backward-compatibility reasons. Please
+# be explicit and use the `FaceGeometryFromLandmarks` subgraph in the new code
+# to enable the same runtime behaviour.
+
+type: "FaceGeometry"
+
+# The size of the input frame. The first element of the pair is the frame width;
+# the other one is the frame height.
+#
+# The face landmarks should have been detected on a frame with the same
+# ratio. If used as-is, the resulting face geometry visualization should be
+# happening on a frame with the same ratio as well.
+#
+# (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# Collection of detected/predicted faces, each represented as a list of face
+# landmarks. (std::vector<NormalizedLandmarkList>)
+input_stream: "MULTI_FACE_LANDMARKS:multi_face_landmarks"
+
+# Environment that describes the current virtual scene.
+# (face_geometry::Environment)
+input_side_packet: "ENVIRONMENT:environment"
+
+# A list of geometry data for each detected face.
+# (std::vector<face_geometry::FaceGeometry>)
+output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
+
+# Extracts face geometry for multiple faces from a vector of face landmark
+# lists.
+node {
+  calculator: "FaceGeometryPipelineCalculator"
+  input_side_packet: "ENVIRONMENT:environment"
+  input_stream: "IMAGE_SIZE:image_size"
+  input_stream: "MULTI_FACE_LANDMARKS:multi_face_landmarks"
+  output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
+  options: {
+    [mediapipe.FaceGeometryPipelineCalculatorOptions.ext] {
+      metadata_path: "mediapipe/modules/face_geometry/data/geometry_pipeline_metadata.binarypb"
+    }
+  }
+}
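# EXAMPLE (a usage sketch in the style of the sibling graphs below; the
# stream and side-packet names are illustrative):
#   node {
#     calculator: "FaceGeometry"
#     input_stream: "IMAGE_SIZE:image_size"
#     input_stream: "MULTI_FACE_LANDMARKS:multi_face_landmarks"
#     input_side_packet: "ENVIRONMENT:environment"
#     output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
#   }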
diff --git a/mediapipe/modules/face_geometry/face_geometry_from_detection.pbtxt b/mediapipe/modules/face_geometry/face_geometry_from_detection.pbtxt
new file mode 100644
index 0000000..f570286
--- /dev/null
+++ b/mediapipe/modules/face_geometry/face_geometry_from_detection.pbtxt
@@ -0,0 +1,87 @@
+# MediaPipe graph to extract geometry from face detection for multiple faces.
+#
+# It is required that "geometry_pipeline_metadata_detection.binarypb" is
+# available at
+# "mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_detection.binarypb"
+# path during execution.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "FaceGeometryFromDetection"
+#     input_stream: "IMAGE_SIZE:image_size"
+#     input_stream: "MULTI_FACE_DETECTION:multi_face_detection"
+#     input_side_packet: "ENVIRONMENT:environment"
+#     output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
+#   }
+
+type: "FaceGeometryFromDetection"
+
+# The size of the input frame. The first element of the pair is the frame width;
+# the other one is the frame height.
+#
+# The face landmarks should have been detected on a frame with the same
+# ratio. If used as-is, the resulting face geometry visualization should be
+# happening on a frame with the same ratio as well.
+#
+# (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# Collection of detected/predicted faces, each represented as a detection.
+# (std::vector<Detection>)
+input_stream: "MULTI_FACE_DETECTION:multi_face_detection"
+
+# Environment that describes the current virtual scene.
+# (face_geometry::Environment)
+input_side_packet: "ENVIRONMENT:environment"
+
+# A list of geometry data for each detected face.
+# (std::vector<face_geometry::FaceGeometry>)
+#
+# NOTE: the triangular topology of the face meshes is only useful when derived
+#       from the 468 face landmarks, not from the 6 face detection landmarks
+#       (keypoints). The latter don't cover the entire face and this mesh is
+#       defined here only to comply with the API. It should be considered as
+#       a placeholder and/or for debugging purposes.
+#
+#       Use the face geometry derived from the face detection landmarks
+#       (keypoints) for the face pose transformation matrix, not the mesh.
+output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
+
+# Begin iterating over a vector of the face detections.
+node {
+  calculator: "BeginLoopDetectionCalculator"
+  input_stream: "ITERABLE:multi_face_detection"
+  output_stream: "ITEM:face_detection"
+  output_stream: "BATCH_END:detection_timestamp"
+}
+
+# Extracts face detection keypoints as normalized landmarks.
+node {
+  calculator: "DetectionToLandmarksCalculator"
+  input_stream: "DETECTION:face_detection"
+  output_stream: "LANDMARKS:face_landmarks"
+}
+
+# End iterating over a vector of the face detections and receive a vector of
+# face landmark lists as a result.
+node {
+  calculator: "EndLoopNormalizedLandmarkListVectorCalculator"
+  input_stream: "ITEM:face_landmarks"
+  input_stream: "BATCH_END:detection_timestamp"
+  output_stream: "ITERABLE:multi_face_landmarks"
+}
+
+# Extracts face geometry for multiple faces from a vector of face detection
+# landmark lists.
+node {
+  calculator: "FaceGeometryPipelineCalculator"
+  input_side_packet: "ENVIRONMENT:environment"
+  input_stream: "IMAGE_SIZE:image_size"
+  input_stream: "MULTI_FACE_LANDMARKS:multi_face_landmarks"
+  output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
+  options: {
+    [mediapipe.FaceGeometryPipelineCalculatorOptions.ext] {
+      metadata_path: "mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_detection.binarypb"
+    }
+  }
+}
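A sketch of consuming the MULTI_FACE_GEOMETRY output on the C++ side, following the NOTE above that only the pose transformation matrix is meaningful when the geometry is derived from detection keypoints. This assumes a calculator's Process(); the 4x4 shape and the column-major packing of MatrixData are assumptions here:

    const auto& multi_face_geometry =
        cc->Inputs()
            .Tag("MULTI_FACE_GEOMETRY")
            .Get<std::vector<face_geometry::FaceGeometry>>();
    for (const face_geometry::FaceGeometry& geometry : multi_face_geometry) {
      const mediapipe::MatrixData& pose = geometry.pose_transform_matrix();
      // Expect a 4x4 transform: rows() * cols() floats in packed_data.
      if (pose.rows() == 4 && pose.cols() == 4) {
        // Assuming column-major packing, elements 12..14 hold the translation.
        const float tx = pose.packed_data(12);
        const float ty = pose.packed_data(13);
        const float tz = pose.packed_data(14);
        (void)tx; (void)ty; (void)tz;
      }
    }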
diff --git a/mediapipe/modules/face_geometry/face_geometry_from_landmarks.pbtxt b/mediapipe/modules/face_geometry/face_geometry_from_landmarks.pbtxt
new file mode 100644
index 0000000..3291476
--- /dev/null
+++ b/mediapipe/modules/face_geometry/face_geometry_from_landmarks.pbtxt
@@ -0,0 +1,54 @@
+# MediaPipe graph to extract geometry from face landmarks for multiple faces.
+#
+# It is required that "geometry_pipeline_metadata_landmarks.binarypb" is
+# available at
+# "mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_landmarks.binarypb"
+# path during execution.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "FaceGeometryFromLandmarks"
+#     input_stream: "IMAGE_SIZE:image_size"
+#     input_stream: "MULTI_FACE_LANDMARKS:multi_face_landmarks"
+#     input_side_packet: "ENVIRONMENT:environment"
+#     output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
+#   }
+
+type: "FaceGeometryFromLandmarks"
+
+# The size of the input frame. The first element of the pair is the frame width;
+# the other one is the frame height.
+#
+# The face landmarks should have been detected on a frame with the same
+# ratio. If used as-is, the resulting face geometry visualization should be
+# happening on a frame with the same ratio as well.
+#
+# (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# Collection of detected/predicted faces, each represented as a list of face
+# landmarks. (std::vector<NormalizedLandmarkList>)
+input_stream: "MULTI_FACE_LANDMARKS:multi_face_landmarks"
+
+# Environment that describes the current virtual scene.
+# (face_geometry::Environment)
+input_side_packet: "ENVIRONMENT:environment"
+
+# A list of geometry data for each detected face.
+# (std::vector<face_geometry::FaceGeometry>)
+output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
+
+# Extracts face geometry for multiple faces from a vector of face landmark
+# lists.
+node {
+  calculator: "FaceGeometryPipelineCalculator"
+  input_side_packet: "ENVIRONMENT:environment"
+  input_stream: "IMAGE_SIZE:image_size"
+  input_stream: "MULTI_FACE_LANDMARKS:multi_face_landmarks"
+  output_stream: "MULTI_FACE_GEOMETRY:multi_face_geometry"
+  options: {
+    [mediapipe.FaceGeometryPipelineCalculatorOptions.ext] {
+      metadata_path: "mediapipe/modules/face_geometry/data/geometry_pipeline_metadata_landmarks.binarypb"
+    }
+  }
+}
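On the host side, the IMAGE_SIZE stream of these subgraphs is fed as a std::pair<int, int> packet. A sketch, assuming a running mediapipe::CalculatorGraph named `graph`, a microsecond timestamp `ts`, and frame dimensions from the capture loop (all assumptions, inside a function returning absl::Status):

    MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
        "image_size",
        mediapipe::MakePacket<std::pair<int, int>>(frame_width, frame_height)
            .At(mediapipe::Timestamp(ts))));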
diff --git a/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc b/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc
new file mode 100644
index 0000000..87e710e
--- /dev/null
+++ b/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc
@@ -0,0 +1,197 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/framework/port/statusor.h"
+#include "mediapipe/modules/face_geometry/geometry_pipeline_calculator.pb.h"
+#include "mediapipe/modules/face_geometry/libs/geometry_pipeline.h"
+#include "mediapipe/modules/face_geometry/libs/validation_utils.h"
+#include "mediapipe/modules/face_geometry/protos/environment.pb.h"
+#include "mediapipe/modules/face_geometry/protos/face_geometry.pb.h"
+#include "mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.pb.h"
+#include "mediapipe/util/resource_util.h"
+
+namespace mediapipe {
+namespace {
+
+static constexpr char kEnvironmentTag[] = "ENVIRONMENT";
+static constexpr char kImageSizeTag[] = "IMAGE_SIZE";
+static constexpr char kMultiFaceGeometryTag[] = "MULTI_FACE_GEOMETRY";
+static constexpr char kMultiFaceLandmarksTag[] = "MULTI_FACE_LANDMARKS";
+
+// A calculator that estimates face geometry for multiple faces from face
+// landmarks.
+//
+// Inputs:
+//   IMAGE_SIZE (`std::pair<int, int>`, required):
+//     The size of the current frame. The first element of the pair is the frame
+//     width; the other one is the frame height.
+//
+//     The face landmarks should have been detected on a frame with the same
+//     ratio. If used as-is, the resulting face geometry visualization should be
+//     happening on a frame with the same ratio as well.
+//
+//   MULTI_FACE_LANDMARKS (`std::vector<NormalizedLandmarkList>`, required):
+//     A vector of face landmark lists.
+//
+// Input side packets:
+//   ENVIRONMENT (`face_geometry::Environment`, required)
+//     Describes an environment; includes the camera frame origin point location
+//     as well as virtual camera parameters.
+//
+// Output:
+//   MULTI_FACE_GEOMETRY (`std::vector<face_geometry::FaceGeometry>`, required):
+//     A vector of face geometry data.
+//
+// Options:
+//   metadata_path (`string`, optional):
+//     Defines a path for the geometry pipeline metadata file.
+//
+//     The geometry pipeline metadata file format must be the binary
+//     `face_geometry.GeometryPipelineMetadata` proto.
+//
+class GeometryPipelineCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc) {
+    cc->InputSidePackets()
+        .Tag(kEnvironmentTag)
+        .Set<face_geometry::Environment>();
+    cc->Inputs().Tag(kImageSizeTag).Set<std::pair<int, int>>();
+    cc->Inputs()
+        .Tag(kMultiFaceLandmarksTag)
+        .Set<std::vector<NormalizedLandmarkList>>();
+    cc->Outputs()
+        .Tag(kMultiFaceGeometryTag)
+        .Set<std::vector<face_geometry::FaceGeometry>>();
+
+    return absl::OkStatus();
+  }
+
+  absl::Status Open(CalculatorContext* cc) override {
+    cc->SetOffset(mediapipe::TimestampDiff(0));
+
+    const auto& options =
+        cc->Options<FaceGeometryPipelineCalculatorOptions>();
+
+    ASSIGN_OR_RETURN(
+        face_geometry::GeometryPipelineMetadata metadata,
+        ReadMetadataFromFile(options.metadata_path()),
+        _ << "Failed to read the geometry pipeline metadata from file!");
+
+    MP_RETURN_IF_ERROR(
+        face_geometry::ValidateGeometryPipelineMetadata(metadata))
+        << "Invalid geometry pipeline metadata!";
+
+    const face_geometry::Environment& environment =
+        cc->InputSidePackets()
+            .Tag(kEnvironmentTag)
+            .Get<face_geometry::Environment>();
+
+    MP_RETURN_IF_ERROR(face_geometry::ValidateEnvironment(environment))
+        << "Invalid environment!";
+
+    ASSIGN_OR_RETURN(
+        geometry_pipeline_,
+        face_geometry::CreateGeometryPipeline(environment, metadata),
+        _ << "Failed to create a geometry pipeline!");
+
+    return absl::OkStatus();
+  }
+
+  absl::Status Process(CalculatorContext* cc) override {
+    // Both the `IMAGE_SIZE` and the `MULTI_FACE_LANDMARKS` streams are required
+    // to have a non-empty packet. In case this requirement is not met, there's
+    // nothing to be processed at the current timestamp.
+    if (cc->Inputs().Tag(kImageSizeTag).IsEmpty() ||
+        cc->Inputs().Tag(kMultiFaceLandmarksTag).IsEmpty()) {
+      return absl::OkStatus();
+    }
+
+    const auto& image_size =
+        cc->Inputs().Tag(kImageSizeTag).Get<std::pair<int, int>>();
+    const auto& multi_face_landmarks =
+        cc->Inputs()
+            .Tag(kMultiFaceLandmarksTag)
+            .Get<std::vector<NormalizedLandmarkList>>();
+
+    auto multi_face_geometry =
+        absl::make_unique<std::vector<face_geometry::FaceGeometry>>();
+
+    ASSIGN_OR_RETURN(
+        *multi_face_geometry,
+        geometry_pipeline_->EstimateFaceGeometry(
+            multi_face_landmarks,  //
+            /*frame_width*/ image_size.first,
+            /*frame_height*/ image_size.second),
+        _ << "Failed to estimate face geometry for multiple faces!");
+
+    cc->Outputs()
+        .Tag(kMultiFaceGeometryTag)
+        .AddPacket(mediapipe::Adopt<std::vector<face_geometry::FaceGeometry>>(
+                       multi_face_geometry.release())
+                       .At(cc->InputTimestamp()));
+
+    return absl::OkStatus();
+  }
+
+  absl::Status Close(CalculatorContext* cc) override {
+    return absl::OkStatus();
+  }
+
+ private:
+  static absl::StatusOr<face_geometry::GeometryPipelineMetadata>
+  ReadMetadataFromFile(const std::string& metadata_path) {
+    ASSIGN_OR_RETURN(std::string metadata_blob,
+                     ReadContentBlobFromFile(metadata_path),
+                     _ << "Failed to read a metadata blob from file!");
+
+    face_geometry::GeometryPipelineMetadata metadata;
+    RET_CHECK(metadata.ParseFromString(metadata_blob))
+        << "Failed to parse a metadata proto from a binary blob!";
+
+    return metadata;
+  }
+
+  static absl::StatusOr<std::string> ReadContentBlobFromFile(
+      const std::string& unresolved_path) {
+    ASSIGN_OR_RETURN(std::string resolved_path,
+                     mediapipe::PathToResourceAsFile(unresolved_path),
+                     _ << "Failed to resolve path! Path = " << unresolved_path);
+
+    std::string content_blob;
+    MP_RETURN_IF_ERROR(
+        mediapipe::GetResourceContents(resolved_path, &content_blob))
+        << "Failed to read content blob! Resolved path = " << resolved_path;
+
+    return content_blob;
+  }
+
+  std::unique_ptr<face_geometry::GeometryPipeline> geometry_pipeline_;
+};
+
+}  // namespace
+
+using FaceGeometryPipelineCalculator = GeometryPipelineCalculator;
+
+REGISTER_CALCULATOR(FaceGeometryPipelineCalculator);
+
+}  // namespace mediapipe
diff --git a/mediapipe/modules/face_geometry/geometry_pipeline_calculator.proto b/mediapipe/modules/face_geometry/geometry_pipeline_calculator.proto
new file mode 100644
index 0000000..638bb45
--- /dev/null
+++ b/mediapipe/modules/face_geometry/geometry_pipeline_calculator.proto
@@ -0,0 +1,27 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator_options.proto";
+
+message FaceGeometryPipelineCalculatorOptions {
+  extend CalculatorOptions {
+    optional FaceGeometryPipelineCalculatorOptions ext = 323693812;
+  }
+
+  optional string metadata_path = 1;
+}
diff --git a/mediapipe/modules/face_geometry/libs/BUILD b/mediapipe/modules/face_geometry/libs/BUILD
new file mode 100644
index 0000000..35dc451
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/BUILD
@@ -0,0 +1,103 @@
+# Copyright 2020 The MediaPipe Authors.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "effect_renderer", + srcs = ["effect_renderer.cc"], + hdrs = ["effect_renderer.h"], + deps = [ + ":mesh_3d_utils", + ":validation_utils", + "//mediapipe/framework/formats:image_format_cc_proto", + "//mediapipe/framework/formats:image_frame", + "//mediapipe/framework/formats:matrix_data_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/framework/port:statusor", + "//mediapipe/gpu:gl_base", + "//mediapipe/gpu:shader_util", + "//mediapipe/modules/face_geometry/protos:environment_cc_proto", + "//mediapipe/modules/face_geometry/protos:face_geometry_cc_proto", + "//mediapipe/modules/face_geometry/protos:mesh_3d_cc_proto", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/types:optional", + ], +) + +cc_library( + name = "geometry_pipeline", + srcs = ["geometry_pipeline.cc"], + hdrs = ["geometry_pipeline.h"], + deps = [ + ":mesh_3d_utils", + ":procrustes_solver", + ":validation_utils", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:matrix", + "//mediapipe/framework/formats:matrix_data_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/framework/port:statusor", + "//mediapipe/modules/face_geometry/protos:environment_cc_proto", + "//mediapipe/modules/face_geometry/protos:face_geometry_cc_proto", + "//mediapipe/modules/face_geometry/protos:geometry_pipeline_metadata_cc_proto", + "//mediapipe/modules/face_geometry/protos:mesh_3d_cc_proto", + "@com_google_absl//absl/memory", + "@eigen_archive//:eigen3", + ], +) + +cc_library( + name = "mesh_3d_utils", + srcs = ["mesh_3d_utils.cc"], + hdrs = ["mesh_3d_utils.h"], + deps = [ + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:statusor", + "//mediapipe/modules/face_geometry/protos:mesh_3d_cc_proto", + ], +) + +cc_library( + name = "procrustes_solver", + srcs = ["procrustes_solver.cc"], + hdrs = ["procrustes_solver.h"], + deps = [ + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/framework/port:statusor", + "@com_google_absl//absl/memory", + "@eigen_archive//:eigen3", + ], +) + +cc_library( + name = "validation_utils", + srcs = ["validation_utils.cc"], + hdrs = ["validation_utils.h"], + deps = [ + ":mesh_3d_utils", + "//mediapipe/framework/formats:matrix_data_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/modules/face_geometry/protos:environment_cc_proto", + "//mediapipe/modules/face_geometry/protos:face_geometry_cc_proto", + "//mediapipe/modules/face_geometry/protos:geometry_pipeline_metadata_cc_proto", + "//mediapipe/modules/face_geometry/protos:mesh_3d_cc_proto", + ], +) diff --git a/mediapipe/modules/face_geometry/libs/effect_renderer.cc b/mediapipe/modules/face_geometry/libs/effect_renderer.cc new file mode 100644 index 
0000000..27a54e0
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/effect_renderer.cc
@@ -0,0 +1,733 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/modules/face_geometry/libs/effect_renderer.h"
+
+#include <array>
+#include <cmath>
+#include <cstdint>
+#include <limits>
+#include <memory>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/types/optional.h"
+#include "mediapipe/framework/formats/image_format.pb.h"
+#include "mediapipe/framework/formats/image_frame.h"
+#include "mediapipe/framework/formats/matrix_data.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/framework/port/statusor.h"
+#include "mediapipe/gpu/gl_base.h"
+#include "mediapipe/gpu/shader_util.h"
+#include "mediapipe/modules/face_geometry/libs/mesh_3d_utils.h"
+#include "mediapipe/modules/face_geometry/libs/validation_utils.h"
+#include "mediapipe/modules/face_geometry/protos/environment.pb.h"
+#include "mediapipe/modules/face_geometry/protos/face_geometry.pb.h"
+#include "mediapipe/modules/face_geometry/protos/mesh_3d.pb.h"
+
+namespace mediapipe::face_geometry {
+namespace {
+
+struct RenderableMesh3d {
+  static absl::StatusOr<RenderableMesh3d> CreateFromProtoMesh3d(
+      const Mesh3d& proto_mesh_3d) {
+    Mesh3d::VertexType vertex_type = proto_mesh_3d.vertex_type();
+
+    RenderableMesh3d renderable_mesh_3d;
+    renderable_mesh_3d.vertex_size = GetVertexSize(vertex_type);
+    ASSIGN_OR_RETURN(
+        renderable_mesh_3d.vertex_position_size,
+        GetVertexComponentSize(vertex_type, VertexComponent::POSITION),
+        _ << "Failed to get the position vertex size!");
+    ASSIGN_OR_RETURN(
+        renderable_mesh_3d.tex_coord_position_size,
+        GetVertexComponentSize(vertex_type, VertexComponent::TEX_COORD),
+        _ << "Failed to get the tex coord vertex size!");
+    ASSIGN_OR_RETURN(
+        renderable_mesh_3d.vertex_position_offset,
+        GetVertexComponentOffset(vertex_type, VertexComponent::POSITION),
+        _ << "Failed to get the position vertex offset!");
+    ASSIGN_OR_RETURN(
+        renderable_mesh_3d.tex_coord_position_offset,
+        GetVertexComponentOffset(vertex_type, VertexComponent::TEX_COORD),
+        _ << "Failed to get the tex coord vertex offset!");
+
+    switch (proto_mesh_3d.primitive_type()) {
+      case Mesh3d::TRIANGLE:
+        renderable_mesh_3d.primitive_type = GL_TRIANGLES;
+        break;
+
+      default:
+        RET_CHECK_FAIL() << "Only triangle primitive types are supported!";
+    }
+
+    renderable_mesh_3d.vertex_buffer.reserve(
+        proto_mesh_3d.vertex_buffer_size());
+    for (float vertex_element : proto_mesh_3d.vertex_buffer()) {
+      renderable_mesh_3d.vertex_buffer.push_back(vertex_element);
+    }
+
+    renderable_mesh_3d.index_buffer.reserve(proto_mesh_3d.index_buffer_size());
+    for (uint32_t index_element : proto_mesh_3d.index_buffer()) {
+      RET_CHECK_LE(index_element, std::numeric_limits<uint16_t>::max())
+          << "Index buffer elements must fit into the `uint16` type in order "
+             "to be renderable!";
+
+      renderable_mesh_3d.index_buffer.push_back(
+          static_cast<uint16_t>(index_element));
+    }
+
+    return renderable_mesh_3d;
+  }
+
+  uint32_t vertex_size;
+  uint32_t vertex_position_size;
+  uint32_t tex_coord_position_size;
+  uint32_t vertex_position_offset;
+  uint32_t tex_coord_position_offset;
+  uint32_t primitive_type;
+
+  std::vector<float> vertex_buffer;
+  std::vector<uint16_t> index_buffer;
+};
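// The uint16 narrowing above matches the renderer's use of 16-bit element
// indices, which are universally supported in OpenGL ES 2.0 (32-bit indices
// require the OES_element_index_uint extension). The same guard as a
// standalone sketch:
//
//   absl::StatusOr<std::vector<uint16_t>> NarrowIndices(
//       const std::vector<uint32_t>& in) {
//     std::vector<uint16_t> out;
//     out.reserve(in.size());
//     for (uint32_t v : in) {
//       RET_CHECK_LE(v, std::numeric_limits<uint16_t>::max())
//           << "Index does not fit into uint16!";
//       out.push_back(static_cast<uint16_t>(v));
//     }
//     return out;
//   }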
+          static_cast<uint16_t>(index_element));
+    }
+
+    return renderable_mesh_3d;
+  }
+
+  uint32_t vertex_size;
+  uint32_t vertex_position_size;
+  uint32_t tex_coord_position_size;
+  uint32_t vertex_position_offset;
+  uint32_t tex_coord_position_offset;
+  uint32_t primitive_type;
+
+  std::vector<float> vertex_buffer;
+  std::vector<uint16_t> index_buffer;
+};
+
+class Texture {
+ public:
+  static absl::StatusOr<std::unique_ptr<Texture>> WrapExternalTexture(
+      GLuint handle, GLenum target, int width, int height) {
+    RET_CHECK(handle) << "External texture must have a non-null handle!";
+    return absl::WrapUnique(new Texture(handle, target, width, height,
+                                        /*is_owned*/ false));
+  }
+
+  static absl::StatusOr<std::unique_ptr<Texture>> CreateFromImageFrame(
+      const ImageFrame& image_frame) {
+    RET_CHECK(image_frame.IsAligned(ImageFrame::kGlDefaultAlignmentBoundary))
+        << "Image frame memory must be aligned for GL usage!";
+
+    RET_CHECK(image_frame.Width() > 0 && image_frame.Height() > 0)
+        << "Image frame must have positive dimensions!";
+
+    RET_CHECK(image_frame.Format() == ImageFormat::SRGB ||
+              image_frame.Format() == ImageFormat::SRGBA)
+        << "Image frame format must be either SRGB or SRGBA!";
+
+    GLint image_format;
+    switch (image_frame.NumberOfChannels()) {
+      case 3:
+        image_format = GL_RGB;
+        break;
+      case 4:
+        image_format = GL_RGBA;
+        break;
+      default:
+        RET_CHECK_FAIL()
+            << "Unexpected number of channels; expected 3 or 4, got "
+            << image_frame.NumberOfChannels() << "!";
+    }
+
+    GLuint handle;
+    glGenTextures(1, &handle);
+    RET_CHECK(handle) << "Failed to initialize an OpenGL texture!";
+
+    glBindTexture(GL_TEXTURE_2D, handle);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
+                    GL_NEAREST_MIPMAP_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glTexImage2D(GL_TEXTURE_2D, 0, image_format, image_frame.Width(),
+                 image_frame.Height(), 0, image_format, GL_UNSIGNED_BYTE,
+                 image_frame.PixelData());
+    glGenerateMipmap(GL_TEXTURE_2D);
+    glBindTexture(GL_TEXTURE_2D, 0);
+
+    return absl::WrapUnique(new Texture(
+        handle, GL_TEXTURE_2D, image_frame.Width(), image_frame.Height(),
+        /*is_owned*/ true));
+  }
+
+  ~Texture() {
+    if (is_owned_) {
+      glDeleteTextures(1, &handle_);
+    }
+  }
+
+  GLuint handle() const { return handle_; }
+  GLenum target() const { return target_; }
+  int width() const { return width_; }
+  int height() const { return height_; }
+
+ private:
+  Texture(GLuint handle, GLenum target, int width, int height, bool is_owned)
+      : handle_(handle),
+        target_(target),
+        width_(width),
+        height_(height),
+        is_owned_(is_owned) {}
+
+  GLuint handle_;
+  GLenum target_;
+  int width_;
+  int height_;
+  bool is_owned_;
+};
+
+class RenderTarget {
+ public:
+  static absl::StatusOr<std::unique_ptr<RenderTarget>> Create() {
+    GLuint framebuffer_handle;
+    glGenFramebuffers(1, &framebuffer_handle);
+    RET_CHECK(framebuffer_handle)
+        << "Failed to initialize an OpenGL framebuffer!";
+
+    return absl::WrapUnique(new RenderTarget(framebuffer_handle));
+  }
+
+  ~RenderTarget() {
+    glDeleteFramebuffers(1, &framebuffer_handle_);
+    // Renderbuffer handle might have never been created if this render target
+    // is destroyed before `SetColorbuffer()` is called for the first time.
+    if (renderbuffer_handle_) {
+      glDeleteRenderbuffers(1, &renderbuffer_handle_);
+    }
+  }
+
+  absl::Status SetColorbuffer(const Texture& colorbuffer_texture) {
+    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_handle_);
+    glViewport(0, 0, colorbuffer_texture.width(),
+               colorbuffer_texture.height());
+
+    glActiveTexture(GL_TEXTURE0);
+    glBindTexture(colorbuffer_texture.target(), colorbuffer_texture.handle());
+    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
+                           colorbuffer_texture.target(),
+                           colorbuffer_texture.handle(),
+                           /*level*/ 0);
+    glBindTexture(colorbuffer_texture.target(), 0);
+
+    // If the existing depth buffer has different dimensions, delete it.
+    if (renderbuffer_handle_ &&
+        (viewport_width_ != colorbuffer_texture.width() ||
+         viewport_height_ != colorbuffer_texture.height())) {
+      glDeleteRenderbuffers(1, &renderbuffer_handle_);
+      renderbuffer_handle_ = 0;
+    }
+
+    // If there is no depth buffer, create one.
+    if (!renderbuffer_handle_) {
+      glGenRenderbuffers(1, &renderbuffer_handle_);
+      RET_CHECK(renderbuffer_handle_)
+          << "Failed to initialize an OpenGL renderbuffer!";
+      glBindRenderbuffer(GL_RENDERBUFFER, renderbuffer_handle_);
+      glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16,
+                            colorbuffer_texture.width(),
+                            colorbuffer_texture.height());
+      glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
+                                GL_RENDERBUFFER, renderbuffer_handle_);
+      glBindRenderbuffer(GL_RENDERBUFFER, 0);
+    }
+
+    viewport_width_ = colorbuffer_texture.width();
+    viewport_height_ = colorbuffer_texture.height();
+
+    glBindFramebuffer(GL_FRAMEBUFFER, 0);
+    glFlush();
+
+    return absl::OkStatus();
+  }
+
+  void Bind() const {
+    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_handle_);
+    glViewport(0, 0, viewport_width_, viewport_height_);
+  }
+
+  void Unbind() const { glBindFramebuffer(GL_FRAMEBUFFER, 0); }
+
+  void Clear() const {
+    Bind();
+    glEnable(GL_DEPTH_TEST);
+    glDepthMask(GL_TRUE);
+
+    glClearColor(0.f, 0.f, 0.f, 0.f);
+    glClearDepthf(1.f);
+    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+
+    glDepthMask(GL_FALSE);
+    glDisable(GL_DEPTH_TEST);
+
+    Unbind();
+    glFlush();
+  }
+
+ private:
+  explicit RenderTarget(GLuint framebuffer_handle)
+      : framebuffer_handle_(framebuffer_handle),
+        renderbuffer_handle_(0),
+        viewport_width_(-1),
+        viewport_height_(-1) {}
+
+  GLuint framebuffer_handle_;
+  GLuint renderbuffer_handle_;
+  int viewport_width_;
+  int viewport_height_;
+};
+
+class Renderer {
+ public:
+  enum class RenderMode { OPAQUE, OVERDRAW, OCCLUSION };
+
+  static absl::StatusOr<std::unique_ptr<Renderer>> Create() {
+    static const GLint kAttrLocation[NUM_ATTRIBUTES] = {
+        ATTRIB_VERTEX,
+        ATTRIB_TEXTURE_POSITION,
+    };
+    static const GLchar* kAttrName[NUM_ATTRIBUTES] = {
+        "position",
+        "tex_coord",
+    };
+
+    static const GLchar* kVertSrc = R"(
+      uniform mat4 projection_mat;
+      uniform mat4 model_mat;
+
+      attribute vec4 position;
+      attribute vec4 tex_coord;
+
+      varying vec2 v_tex_coord;
+
+      void main() {
+        v_tex_coord = tex_coord.xy;
+        gl_Position = projection_mat * model_mat * position;
+      }
+    )";
+
+    static const GLchar* kFragSrc = R"(
+      precision mediump float;
+
+      varying vec2 v_tex_coord;
+      uniform sampler2D texture;
+
+      void main() {
+        gl_FragColor = texture2D(texture, v_tex_coord);
+      }
+    )";
+
+    GLuint program_handle = 0;
+    GlhCreateProgram(kVertSrc, kFragSrc, NUM_ATTRIBUTES,
+                     (const GLchar**)&kAttrName[0], kAttrLocation,
+                     &program_handle);
+    RET_CHECK(program_handle) << "Problem initializing the texture program!";
+    GLint projection_mat_uniform =
+        glGetUniformLocation(program_handle,
"projection_mat"); + GLint model_mat_uniform = glGetUniformLocation(program_handle, "model_mat"); + GLint texture_uniform = glGetUniformLocation(program_handle, "texture"); + + RET_CHECK_NE(projection_mat_uniform, -1) + << "Failed to find `projection_mat` uniform!"; + RET_CHECK_NE(model_mat_uniform, -1) + << "Failed to find `model_mat` uniform!"; + RET_CHECK_NE(texture_uniform, -1) << "Failed to find `texture` uniform!"; + + return absl::WrapUnique(new Renderer(program_handle, projection_mat_uniform, + model_mat_uniform, texture_uniform)); + } + + ~Renderer() { glDeleteProgram(program_handle_); } + + absl::Status Render(const RenderTarget& render_target, const Texture& texture, + const RenderableMesh3d& mesh_3d, + const std::array& projection_mat, + const std::array& model_mat, + RenderMode render_mode) const { + glUseProgram(program_handle_); + // Set up the GL state. + glEnable(GL_BLEND); + glFrontFace(GL_CCW); + switch (render_mode) { + case RenderMode::OPAQUE: + glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); + glEnable(GL_DEPTH_TEST); + glDepthMask(GL_TRUE); + break; + + case RenderMode::OVERDRAW: + glBlendFunc(GL_ONE, GL_ZERO); + glDisable(GL_DEPTH_TEST); + glDepthMask(GL_FALSE); + break; + + case RenderMode::OCCLUSION: + glBlendFunc(GL_ZERO, GL_ONE); + glEnable(GL_DEPTH_TEST); + glDepthMask(GL_TRUE); + break; + } + + render_target.Bind(); + // Set up vertex attributes. + glVertexAttribPointer( + ATTRIB_VERTEX, mesh_3d.vertex_position_size, GL_FLOAT, 0, + mesh_3d.vertex_size * sizeof(float), + mesh_3d.vertex_buffer.data() + mesh_3d.vertex_position_offset); + glEnableVertexAttribArray(ATTRIB_VERTEX); + glVertexAttribPointer( + ATTRIB_TEXTURE_POSITION, mesh_3d.tex_coord_position_size, GL_FLOAT, 0, + mesh_3d.vertex_size * sizeof(float), + mesh_3d.vertex_buffer.data() + mesh_3d.tex_coord_position_offset); + glEnableVertexAttribArray(ATTRIB_TEXTURE_POSITION); + // Set up textures and uniforms. + glActiveTexture(GL_TEXTURE1); + glBindTexture(texture.target(), texture.handle()); + glUniform1i(texture_uniform_, 1); + glUniformMatrix4fv(projection_mat_uniform_, 1, GL_FALSE, + projection_mat.data()); + glUniformMatrix4fv(model_mat_uniform_, 1, GL_FALSE, model_mat.data()); + // Draw the mesh. + glDrawElements(mesh_3d.primitive_type, mesh_3d.index_buffer.size(), + GL_UNSIGNED_SHORT, mesh_3d.index_buffer.data()); + // Unbind textures and uniforms. + glActiveTexture(GL_TEXTURE1); + glBindTexture(texture.target(), 0); + render_target.Unbind(); + // Unbind vertex attributes. + glDisableVertexAttribArray(ATTRIB_TEXTURE_POSITION); + glDisableVertexAttribArray(ATTRIB_VERTEX); + // Restore the GL state. 
+    glDepthMask(GL_FALSE);
+    glDisable(GL_DEPTH_TEST);
+    glDisable(GL_BLEND);
+
+    glUseProgram(0);
+    glFlush();
+
+    return absl::OkStatus();
+  }
+
+ private:
+  enum { ATTRIB_VERTEX, ATTRIB_TEXTURE_POSITION, NUM_ATTRIBUTES };
+
+  Renderer(GLuint program_handle, GLint projection_mat_uniform,
+           GLint model_mat_uniform, GLint texture_uniform)
+      : program_handle_(program_handle),
+        projection_mat_uniform_(projection_mat_uniform),
+        model_mat_uniform_(model_mat_uniform),
+        texture_uniform_(texture_uniform) {}
+
+  GLuint program_handle_;
+  GLint projection_mat_uniform_;
+  GLint model_mat_uniform_;
+  GLint texture_uniform_;
+};
+
+class EffectRendererImpl : public EffectRenderer {
+ public:
+  EffectRendererImpl(
+      const Environment& environment,
+      std::unique_ptr<RenderTarget> render_target,
+      std::unique_ptr<Renderer> renderer,
+      RenderableMesh3d&& renderable_quad_mesh_3d,
+      absl::optional<RenderableMesh3d>&& renderable_effect_mesh_3d,
+      std::unique_ptr<Texture> empty_color_texture,
+      std::unique_ptr<Texture> effect_texture)
+      : environment_(environment),
+        render_target_(std::move(render_target)),
+        renderer_(std::move(renderer)),
+        renderable_quad_mesh_3d_(std::move(renderable_quad_mesh_3d)),
+        renderable_effect_mesh_3d_(std::move(renderable_effect_mesh_3d)),
+        empty_color_texture_(std::move(empty_color_texture)),
+        effect_texture_(std::move(effect_texture)),
+        identity_matrix_(Create4x4IdentityMatrix()) {}
+
+  absl::Status RenderEffect(
+      const std::vector<FaceGeometry>& multi_face_geometry,
+      int frame_width,            //
+      int frame_height,           //
+      GLenum src_texture_target,  //
+      GLuint src_texture_name,    //
+      GLenum dst_texture_target,  //
+      GLuint dst_texture_name) {
+    // Validate input arguments.
+    MP_RETURN_IF_ERROR(ValidateFrameDimensions(frame_width, frame_height))
+        << "Invalid frame dimensions!";
+    RET_CHECK(src_texture_name > 0 && dst_texture_name > 0)
+        << "Both source and destination texture names must be non-null!";
+    RET_CHECK_NE(src_texture_name, dst_texture_name)
+        << "Source and destination texture names must be different!";
+
+    // Validate all input face geometries.
+    for (const FaceGeometry& face_geometry : multi_face_geometry) {
+      MP_RETURN_IF_ERROR(ValidateFaceGeometry(face_geometry))
+          << "Invalid face geometry!";
+    }
+
+    // Wrap both source and destination textures.
+    ASSIGN_OR_RETURN(
+        std::unique_ptr<Texture> src_texture,
+        Texture::WrapExternalTexture(src_texture_name, src_texture_target,
+                                     frame_width, frame_height),
+        _ << "Failed to wrap the external source texture");
+    ASSIGN_OR_RETURN(
+        std::unique_ptr<Texture> dst_texture,
+        Texture::WrapExternalTexture(dst_texture_name, dst_texture_target,
+                                     frame_width, frame_height),
+        _ << "Failed to wrap the external destination texture");
+
+    // Set the destination texture as the color buffer. Then, clear both the
+    // color and the depth buffers for the render target.
+    MP_RETURN_IF_ERROR(render_target_->SetColorbuffer(*dst_texture))
+        << "Failed to set the destination texture as the colorbuffer!";
+    render_target_->Clear();
+
+    // Render the source texture on top of the quad mesh (i.e. make a copy)
+    // into the render target.
+    MP_RETURN_IF_ERROR(renderer_->Render(
+        *render_target_, *src_texture, renderable_quad_mesh_3d_,
+        identity_matrix_, identity_matrix_, Renderer::RenderMode::OVERDRAW))
+        << "Failed to render the source texture on top of the quad mesh!";
+
+    // Extract pose transform matrices and meshes from the face geometry data.
+    const int num_faces = multi_face_geometry.size();
+
+    std::vector<std::array<float, 16>> face_pose_transform_matrices(num_faces);
+    std::vector<RenderableMesh3d> renderable_face_meshes(num_faces);
+    for (int i = 0; i < num_faces; ++i) {
+      const FaceGeometry& face_geometry = multi_face_geometry[i];
+
+      // Extract the face pose transformation matrix.
+      ASSIGN_OR_RETURN(
+          face_pose_transform_matrices[i],
+          Convert4x4MatrixDataToArrayFormat(
+              face_geometry.pose_transform_matrix()),
+          _ << "Failed to extract the face pose transformation matrix!");
+
+      // Extract the face mesh as a renderable.
+      ASSIGN_OR_RETURN(
+          renderable_face_meshes[i],
+          RenderableMesh3d::CreateFromProtoMesh3d(face_geometry.mesh()),
+          _ << "Failed to extract a renderable face mesh!");
+    }
+
+    // Create a perspective matrix using the frame aspect ratio.
+    std::array<float, 16> perspective_matrix = CreatePerspectiveMatrix(
+        /*aspect_ratio*/ static_cast<float>(frame_width) / frame_height);
+
+    // Render a face mesh occluder for each face.
+    for (int i = 0; i < num_faces; ++i) {
+      const std::array<float, 16>& face_pose_transform_matrix =
+          face_pose_transform_matrices[i];
+      const RenderableMesh3d& renderable_face_mesh = renderable_face_meshes[i];
+
+      // Render the face mesh using the empty color texture, i.e. the face
+      // mesh occluder.
+      //
+      // For occlusion, the pose transformation is moved ~1mm away from camera
+      // in order to allow the face mesh texture to be rendered without
+      // failing the depth test.
+      std::array<float, 16> occlusion_face_pose_transform_matrix =
+          face_pose_transform_matrix;
+      occlusion_face_pose_transform_matrix[14] -= 0.1f;  // ~ 1mm
+      MP_RETURN_IF_ERROR(renderer_->Render(
+          *render_target_, *empty_color_texture_, renderable_face_mesh,
+          perspective_matrix, occlusion_face_pose_transform_matrix,
+          Renderer::RenderMode::OCCLUSION))
+          << "Failed to render the face mesh occluder!";
+    }
+
+    // Render the main face mesh effect component for each face.
+    for (int i = 0; i < num_faces; ++i) {
+      const std::array<float, 16>& face_pose_transform_matrix =
+          face_pose_transform_matrices[i];
+
+      // If there is no effect 3D mesh provided, then the face mesh itself is
+      // used as a topology for rendering (for example, this can be used for
+      // facepaint effects or AR makeup).
+      const RenderableMesh3d& main_effect_mesh_3d =
+          renderable_effect_mesh_3d_ ? *renderable_effect_mesh_3d_
+                                     : renderable_face_meshes[i];
+
+      MP_RETURN_IF_ERROR(renderer_->Render(
+          *render_target_, *effect_texture_, main_effect_mesh_3d,
+          perspective_matrix, face_pose_transform_matrix,
+          Renderer::RenderMode::OPAQUE))
+          << "Failed to render the main effect pass!";
+    }
+
+    // At this point in the code, the destination texture must contain the
+    // correctly rendered effect, so we should just return.
+    return absl::OkStatus();
+  }
+
+ private:
+  std::array<float, 16> CreatePerspectiveMatrix(float aspect_ratio) const {
+    static constexpr float kDegreesToRadians = M_PI / 180.f;
+
+    std::array<float, 16> perspective_matrix;
+    perspective_matrix.fill(0.f);
+
+    const auto& env_camera = environment_.perspective_camera();
+    // Standard perspective projection matrix calculations.
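+    // For reference, the entries filled in below form the standard OpenGL
+    // perspective projection matrix in column-major storage, with
+    // f = 1 / tan(fov_y / 2), n = near and fr = far:
+    //
+    //   | f/aspect  0    0                   0                  |
+    //   | 0         f    0                   0                  |
+    //   | 0         0    (n + fr)/(n - fr)   2*fr*n/(n - fr)    |
+    //   | 0         0    -1                  0                  |
+    //
+    // In column-major order these entries land at indices 0, 5, 10, 14
+    // and 11, matching the assignments below.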
+    const float f = 1.0f / std::tan(kDegreesToRadians *
+                                    env_camera.vertical_fov_degrees() / 2.f);
+
+    const float denom = 1.0f / (env_camera.near() - env_camera.far());
+    perspective_matrix[0] = f / aspect_ratio;
+    perspective_matrix[5] = f;
+    perspective_matrix[10] = (env_camera.near() + env_camera.far()) * denom;
+    perspective_matrix[11] = -1.f;
+    perspective_matrix[14] =
+        2.f * env_camera.far() * env_camera.near() * denom;
+
+    // If the environment's origin point location is in the top left corner,
+    // then an additional flip along the Y-axis is required to render
+    // correctly.
+    if (environment_.origin_point_location() ==
+        OriginPointLocation::TOP_LEFT_CORNER) {
+      perspective_matrix[5] *= -1.f;
+    }
+
+    return perspective_matrix;
+  }
+
+  static std::array<float, 16> Create4x4IdentityMatrix() {
+    return {1.f, 0.f, 0.f, 0.f,  //
+            0.f, 1.f, 0.f, 0.f,  //
+            0.f, 0.f, 1.f, 0.f,  //
+            0.f, 0.f, 0.f, 1.f};
+  }
+
+  static absl::StatusOr<std::array<float, 16>>
+  Convert4x4MatrixDataToArrayFormat(const MatrixData& matrix_data) {
+    RET_CHECK(matrix_data.rows() == 4 &&  //
+              matrix_data.cols() == 4 &&  //
+              matrix_data.packed_data_size() == 16)
+        << "The matrix data must define a 4x4 matrix!";
+
+    std::array<float, 16> matrix_array;
+    for (int i = 0; i < 16; i++) {
+      matrix_array[i] = matrix_data.packed_data(i);
+    }
+
+    // Matrix array must be in the OpenGL-friendly column-major order. If
+    // `matrix_data` is in the row-major order, then transpose.
+    if (matrix_data.layout() == MatrixData::ROW_MAJOR) {
+      std::swap(matrix_array[1], matrix_array[4]);
+      std::swap(matrix_array[2], matrix_array[8]);
+      std::swap(matrix_array[3], matrix_array[12]);
+      std::swap(matrix_array[6], matrix_array[9]);
+      std::swap(matrix_array[7], matrix_array[13]);
+      std::swap(matrix_array[11], matrix_array[14]);
+    }
+
+    return matrix_array;
+  }
+
+  Environment environment_;
+
+  std::unique_ptr<RenderTarget> render_target_;
+  std::unique_ptr<Renderer> renderer_;
+
+  RenderableMesh3d renderable_quad_mesh_3d_;
+  absl::optional<RenderableMesh3d> renderable_effect_mesh_3d_;
+
+  std::unique_ptr<Texture> empty_color_texture_;
+  std::unique_ptr<Texture> effect_texture_;
+
+  std::array<float, 16> identity_matrix_;
+};
+
+Mesh3d CreateQuadMesh3d() {
+  static constexpr float kQuadMesh3dVertexBuffer[] = {
+      -1.f, -1.f, 0.f, 0.f, 0.f,  //
+      1.f,  -1.f, 0.f, 1.f, 0.f,  //
+      -1.f, 1.f,  0.f, 0.f, 1.f,  //
+      1.f,  1.f,  0.f, 1.f, 1.f,  //
+  };
+  static constexpr uint16_t kQuadMesh3dIndexBuffer[] = {0, 1, 2, 1, 3, 2};
+
+  static constexpr int kQuadMesh3dVertexBufferSize =
+      sizeof(kQuadMesh3dVertexBuffer) / sizeof(float);
+  static constexpr int kQuadMesh3dIndexBufferSize =
+      sizeof(kQuadMesh3dIndexBuffer) / sizeof(uint16_t);
+
+  Mesh3d quad_mesh_3d;
+  quad_mesh_3d.set_vertex_type(Mesh3d::VERTEX_PT);
+  quad_mesh_3d.set_primitive_type(Mesh3d::TRIANGLE);
+  for (int i = 0; i < kQuadMesh3dVertexBufferSize; ++i) {
+    quad_mesh_3d.add_vertex_buffer(kQuadMesh3dVertexBuffer[i]);
+  }
+  for (int i = 0; i < kQuadMesh3dIndexBufferSize; ++i) {
+    quad_mesh_3d.add_index_buffer(kQuadMesh3dIndexBuffer[i]);
+  }
+
+  return quad_mesh_3d;
+}
+
+ImageFrame CreateEmptyColorTexture() {
+  static constexpr ImageFormat::Format kEmptyColorTextureFormat =
+      ImageFormat::SRGBA;
+  static constexpr int kEmptyColorTextureWidth = 1;
+  static constexpr int kEmptyColorTextureHeight = 1;
+
+  ImageFrame empty_color_texture(
+      kEmptyColorTextureFormat, kEmptyColorTextureWidth,
+      kEmptyColorTextureHeight, ImageFrame::kGlDefaultAlignmentBoundary);
+  empty_color_texture.SetToZero();
+
+  return empty_color_texture;
+}
+
+}  // namespace
+
+absl::StatusOr<std::unique_ptr<EffectRenderer>> CreateEffectRenderer(
+    const Environment& environment,  //
+    const absl::optional<Mesh3d>& effect_mesh_3d,  //
+    ImageFrame&& effect_texture) {
+  MP_RETURN_IF_ERROR(ValidateEnvironment(environment))
+      << "Invalid environment!";
+  if (effect_mesh_3d) {
+    MP_RETURN_IF_ERROR(ValidateMesh3d(*effect_mesh_3d))
+        << "Invalid effect 3D mesh!";
+  }
+
+  ASSIGN_OR_RETURN(std::unique_ptr<RenderTarget> render_target,
+                   RenderTarget::Create(),
+                   _ << "Failed to create a render target!");
+  ASSIGN_OR_RETURN(std::unique_ptr<Renderer> renderer, Renderer::Create(),
+                   _ << "Failed to create a renderer!");
+  ASSIGN_OR_RETURN(RenderableMesh3d renderable_quad_mesh_3d,
+                   RenderableMesh3d::CreateFromProtoMesh3d(CreateQuadMesh3d()),
+                   _ << "Failed to create a renderable quad mesh!");
+  absl::optional<RenderableMesh3d> renderable_effect_mesh_3d;
+  if (effect_mesh_3d) {
+    ASSIGN_OR_RETURN(renderable_effect_mesh_3d,
+                     RenderableMesh3d::CreateFromProtoMesh3d(*effect_mesh_3d),
+                     _ << "Failed to create a renderable effect mesh!");
+  }
+  ASSIGN_OR_RETURN(std::unique_ptr<Texture> empty_color_gl_texture,
+                   Texture::CreateFromImageFrame(CreateEmptyColorTexture()),
+                   _ << "Failed to create an empty color texture!");
+  ASSIGN_OR_RETURN(std::unique_ptr<Texture> effect_gl_texture,
+                   Texture::CreateFromImageFrame(effect_texture),
+                   _ << "Failed to create an effect texture!");
+
+  std::unique_ptr<EffectRenderer> result =
+      absl::make_unique<EffectRendererImpl>(
+          environment, std::move(render_target), std::move(renderer),
+          std::move(renderable_quad_mesh_3d),
+          std::move(renderable_effect_mesh_3d),
+          std::move(empty_color_gl_texture), std::move(effect_gl_texture));
+
+  return result;
+}
+
+}  // namespace mediapipe::face_geometry
diff --git a/mediapipe/modules/face_geometry/libs/effect_renderer.h b/mediapipe/modules/face_geometry/libs/effect_renderer.h
new file mode 100644
index 0000000..71330e7
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/effect_renderer.h
@@ -0,0 +1,92 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_MODULES_FACE_GEOMETRY_LIBS_EFFECT_RENDERER_H_
+#define MEDIAPIPE_MODULES_FACE_GEOMETRY_LIBS_EFFECT_RENDERER_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "mediapipe/framework/formats/image_frame.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/framework/port/statusor.h"
+#include "mediapipe/gpu/gl_base.h"
+#include "mediapipe/modules/face_geometry/protos/environment.pb.h"
+#include "mediapipe/modules/face_geometry/protos/face_geometry.pb.h"
+#include "mediapipe/modules/face_geometry/protos/mesh_3d.pb.h"
+
+namespace mediapipe::face_geometry {
+
+// Encapsulates a stateful face effect renderer.
+class EffectRenderer {
+ public:
+  virtual ~EffectRenderer() = default;
+
+  // Renders a face effect based on the multiple facial geometries.
+  //
+  // Must be called in the same GL context as was used upon initialization.
+  //
+  // Each of the `multi_face_geometry` must be valid (for details, please refer
+  // to the proto message definition comments and/or `validation_utils.h/cc`).
+  // Additionally, all face mesh index buffer elements must fit into the
+  // `uint16` type in order to be renderable.
+  //
+  // Both `frame_width` and `frame_height` must be positive.
+  //
+  // Both `src_texture_name` and `dst_texture_name` must be positive and
+  // reference existing OpenGL textures in the current context. They should
+  // also reference different textures, as in-place effect rendering is not
+  // yet supported.
+  virtual absl::Status RenderEffect(
+      const std::vector<FaceGeometry>& multi_face_geometry,
+      int frame_width,            //
+      int frame_height,           //
+      GLenum src_texture_target,  //
+      GLuint src_texture_name,    //
+      GLenum dst_texture_target,  //
+      GLuint dst_texture_name) = 0;
+};
+
+// Creates an instance of `EffectRenderer`.
+//
+// `effect_mesh_3d` defines a rigid 3d mesh which is "attached" to the face and
+// is driven by the face pose transformation matrix. If it is not present, the
+// runtime face mesh will be used as the effect mesh - this mode is handy for
+// facepaint effects. In both rendering modes, the face mesh is first rendered
+// as an occluder straight into the depth buffer. This step helps to create a
+// more believable effect by hiding invisible elements behind the face
+// surface.
+//
+// `effect_texture` defines the color texture to be rendered on top of the
+// effect mesh. Please be aware of the difference between the CPU texture
+// memory layout and the GPU texture sampler coordinate space. This renderer
+// follows conventions discussed here: https://open.gl/textures
+//
+// Must be called in the same GL context as will be used for rendering.
+//
+// Both `environment` and `effect_mesh_3d` (if present) must be valid (for
+// details, please refer to the proto message definition comments and/or
+// `validation_utils.h/cc`). Additionally, `effect_mesh_3d`s index buffer
+// elements must fit into the `uint16` type in order to be renderable.
+//
+// `effect_texture` must have positive dimensions. Its format must be either
+// `SRGB` or `SRGBA`. Its memory must be aligned for GL usage.
+absl::StatusOr<std::unique_ptr<EffectRenderer>> CreateEffectRenderer(
+    const Environment& environment,                //
+    const absl::optional<Mesh3d>& effect_mesh_3d,  //
+    ImageFrame&& effect_texture);
+
+}  // namespace mediapipe::face_geometry
+
+#endif  // MEDIAPIPE_MODULES_FACE_GEOMETRY_LIBS_EFFECT_RENDERER_H_
diff --git a/mediapipe/modules/face_geometry/libs/geometry_pipeline.cc b/mediapipe/modules/face_geometry/libs/geometry_pipeline.cc
new file mode 100644
index 0000000..bcfce7c
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/geometry_pipeline.cc
@@ -0,0 +1,466 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
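Before moving into the geometry pipeline implementation, here is a minimal usage sketch of the `EffectRenderer` API declared above. It is a sketch under stated assumptions rather than part of this patch: a current GL context, MediaPipe's status macros, and all inputs (`env`, `effect_texture`, the texture names, the face geometry vector) are assumed to be supplied by the caller; only `CreateEffectRenderer()` and `RenderEffect()` come from the header.

```cpp
// Sketch: drive the effect renderer for one frame (assumes a current GL
// context; all inputs are caller-provided).
absl::Status RenderOneFrame(
    const mediapipe::face_geometry::Environment& env,
    mediapipe::ImageFrame&& effect_texture,
    const std::vector<mediapipe::face_geometry::FaceGeometry>& faces,
    int frame_width, int frame_height, GLuint src_tex, GLuint dst_tex) {
  namespace fg = mediapipe::face_geometry;

  // Passing `absl::nullopt` for the effect mesh selects "facepaint" mode:
  // the runtime face mesh itself is used as the effect topology.
  ASSIGN_OR_RETURN(std::unique_ptr<fg::EffectRenderer> renderer,
                   fg::CreateEffectRenderer(env, absl::nullopt,
                                            std::move(effect_texture)));

  // Copies `src_tex` into `dst_tex`, renders the occluders, then the effect.
  return renderer->RenderEffect(faces, frame_width, frame_height,
                                GL_TEXTURE_2D, src_tex, GL_TEXTURE_2D,
                                dst_tex);
}
```

In practice the renderer would be created once and reused per frame, since it owns GL resources (program, framebuffer, textures).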
+ +#include "mediapipe/modules/face_geometry/libs/geometry_pipeline.h" + +#include +#include +#include +#include +#include + +#include "Eigen/Core" +#include "absl/memory/memory.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/framework/formats/matrix_data.pb.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/framework/port/statusor.h" +#include "mediapipe/modules/face_geometry/libs/mesh_3d_utils.h" +#include "mediapipe/modules/face_geometry/libs/procrustes_solver.h" +#include "mediapipe/modules/face_geometry/libs/validation_utils.h" +#include "mediapipe/modules/face_geometry/protos/environment.pb.h" +#include "mediapipe/modules/face_geometry/protos/face_geometry.pb.h" +#include "mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.pb.h" +#include "mediapipe/modules/face_geometry/protos/mesh_3d.pb.h" + +namespace mediapipe::face_geometry { +namespace { + +struct PerspectiveCameraFrustum { + // NOTE: all arguments must be validated prior to calling this constructor. + PerspectiveCameraFrustum(const PerspectiveCamera& perspective_camera, + int frame_width, int frame_height) { + static constexpr float kDegreesToRadians = 3.14159265358979323846f / 180.f; + + const float height_at_near = + 2.f * perspective_camera.near() * + std::tan(0.5f * kDegreesToRadians * + perspective_camera.vertical_fov_degrees()); + + const float width_at_near = frame_width * height_at_near / frame_height; + + left = -0.5f * width_at_near; + right = 0.5f * width_at_near; + bottom = -0.5f * height_at_near; + top = 0.5f * height_at_near; + near = perspective_camera.near(); + far = perspective_camera.far(); + } + + float left; + float right; + float bottom; + float top; + float near; + float far; +}; + +class ScreenToMetricSpaceConverter { + public: + ScreenToMetricSpaceConverter( + OriginPointLocation origin_point_location, // + InputSource input_source, // + Eigen::Matrix3Xf&& canonical_metric_landmarks, // + Eigen::VectorXf&& landmark_weights, // + std::unique_ptr procrustes_solver) + : origin_point_location_(origin_point_location), + input_source_(input_source), + canonical_metric_landmarks_(std::move(canonical_metric_landmarks)), + landmark_weights_(std::move(landmark_weights)), + procrustes_solver_(std::move(procrustes_solver)) {} + + // Converts `screen_landmark_list` into `metric_landmark_list` and estimates + // the `pose_transform_mat`. + // + // Here's the algorithm summary: + // + // (1) Project X- and Y- screen landmark coordinates at the Z near plane. + // + // (2) Estimate a canonical-to-runtime landmark set scale by running the + // Procrustes solver using the screen runtime landmarks. + // + // On this iteration, screen landmarks are used instead of unprojected + // metric landmarks as it is not safe to unproject due to the relative + // nature of the input screen landmark Z coordinate. + // + // (3) Use the canonical-to-runtime scale from (2) to unproject the screen + // landmarks. The result is referenced as "intermediate landmarks" because + // they are the first estimation of the resuling metric landmarks, but are + // not quite there yet. + // + // (4) Estimate a canonical-to-runtime landmark set scale by running the + // Procrustes solver using the intermediate runtime landmarks. 
+  //
+  // (5) Use the product of the scale factors from (2) and (4) to unproject
+  //     the screen landmarks the second time. This is the second and the
+  //     final estimation of the metric landmarks.
+  //
+  // (6) Multiply each of the metric landmarks by the inverse pose
+  //     transformation matrix to align the runtime metric face landmarks with
+  //     the canonical metric face landmarks.
+  //
+  // Note: the input screen landmarks are in the left-handed coordinate
+  //       system, however any metric landmarks - including the canonical
+  //       metric landmarks, the final runtime metric landmarks and any
+  //       intermediate runtime metric landmarks - are in the right-handed
+  //       coordinate system.
+  //
+  //       To keep the logic correct, the landmark set handedness is changed
+  //       any time the screen-to-metric semantic barrier is passed.
+  absl::Status Convert(const NormalizedLandmarkList& screen_landmark_list,  //
+                       const PerspectiveCameraFrustum& pcf,                 //
+                       LandmarkList& metric_landmark_list,                  //
+                       Eigen::Matrix4f& pose_transform_mat) const {
+    RET_CHECK_EQ(screen_landmark_list.landmark_size(),
+                 canonical_metric_landmarks_.cols())
+        << "The number of landmarks doesn't match the number passed upon "
+           "initialization!";
+
+    Eigen::Matrix3Xf screen_landmarks;
+    ConvertLandmarkListToEigenMatrix(screen_landmark_list, screen_landmarks);
+
+    ProjectXY(pcf, screen_landmarks);
+    const float depth_offset = screen_landmarks.row(2).mean();
+
+    // 1st iteration: don't unproject XY because it's unsafe to do so due to
+    //                the relative nature of the Z coordinate. Instead, run
+    //                the first estimation on the projected XY and use that
+    //                scale to unproject for the 2nd iteration.
+    Eigen::Matrix3Xf intermediate_landmarks(screen_landmarks);
+    ChangeHandedness(intermediate_landmarks);
+
+    ASSIGN_OR_RETURN(const float first_iteration_scale,
+                     EstimateScale(intermediate_landmarks),
+                     _ << "Failed to estimate first iteration scale!");
+
+    // 2nd iteration: unproject XY using the scale from the 1st iteration.
+    intermediate_landmarks = screen_landmarks;
+    MoveAndRescaleZ(pcf, depth_offset, first_iteration_scale,
+                    intermediate_landmarks);
+    UnprojectXY(pcf, intermediate_landmarks);
+    ChangeHandedness(intermediate_landmarks);
+
+    // For face detection input landmarks, re-write Z-coord from the canonical
+    // landmarks.
+    if (input_source_ == InputSource::FACE_DETECTION_PIPELINE) {
+      Eigen::Matrix4f intermediate_pose_transform_mat;
+      MP_RETURN_IF_ERROR(procrustes_solver_->SolveWeightedOrthogonalProblem(
+          canonical_metric_landmarks_, intermediate_landmarks,
+          landmark_weights_, intermediate_pose_transform_mat))
+          << "Failed to estimate pose transform matrix!";
+
+      intermediate_landmarks.row(2) =
+          (intermediate_pose_transform_mat *
+           canonical_metric_landmarks_.colwise().homogeneous())
+              .row(2);
+    }
+    ASSIGN_OR_RETURN(const float second_iteration_scale,
+                     EstimateScale(intermediate_landmarks),
+                     _ << "Failed to estimate second iteration scale!");
+
+    // Use the total scale to unproject the screen landmarks.
+    const float total_scale = first_iteration_scale * second_iteration_scale;
+    MoveAndRescaleZ(pcf, depth_offset, total_scale, screen_landmarks);
+    UnprojectXY(pcf, screen_landmarks);
+    ChangeHandedness(screen_landmarks);
+
+    // At this point, screen landmarks are converted into metric landmarks.
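+    // (For reference: `UnprojectXY` above applies the pinhole relations
+    // x_metric = x_screen * z / near and y_metric = y_screen * z / near,
+    // which is why a Z estimate, and hence the two-pass scale estimation
+    // above, is needed before the X and Y coordinates can be unprojected.)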
+    Eigen::Matrix3Xf& metric_landmarks = screen_landmarks;
+
+    MP_RETURN_IF_ERROR(procrustes_solver_->SolveWeightedOrthogonalProblem(
+        canonical_metric_landmarks_, metric_landmarks, landmark_weights_,
+        pose_transform_mat))
+        << "Failed to estimate pose transform matrix!";
+
+    // For face detection input landmarks, re-write Z-coord from the canonical
+    // landmarks and run the pose transform estimation again.
+    if (input_source_ == InputSource::FACE_DETECTION_PIPELINE) {
+      metric_landmarks.row(2) =
+          (pose_transform_mat *
+           canonical_metric_landmarks_.colwise().homogeneous())
+              .row(2);
+
+      MP_RETURN_IF_ERROR(procrustes_solver_->SolveWeightedOrthogonalProblem(
+          canonical_metric_landmarks_, metric_landmarks, landmark_weights_,
+          pose_transform_mat))
+          << "Failed to estimate pose transform matrix!";
+    }
+
+    // Multiply each of the metric landmarks by the inverse pose
+    // transformation matrix to align the runtime metric face landmarks with
+    // the canonical metric face landmarks.
+    metric_landmarks = (pose_transform_mat.inverse() *
+                        metric_landmarks.colwise().homogeneous())
+                           .topRows(3);
+
+    ConvertEigenMatrixToLandmarkList(metric_landmarks, metric_landmark_list);
+
+    return absl::OkStatus();
+  }
+
+ private:
+  void ProjectXY(const PerspectiveCameraFrustum& pcf,
+                 Eigen::Matrix3Xf& landmarks) const {
+    float x_scale = pcf.right - pcf.left;
+    float y_scale = pcf.top - pcf.bottom;
+    float x_translation = pcf.left;
+    float y_translation = pcf.bottom;
+
+    if (origin_point_location_ == OriginPointLocation::TOP_LEFT_CORNER) {
+      landmarks.row(1) = 1.f - landmarks.row(1).array();
+    }
+
+    landmarks = landmarks.array().colwise() *
+                Eigen::Array3f(x_scale, y_scale, x_scale);
+    landmarks.colwise() += Eigen::Vector3f(x_translation, y_translation, 0.f);
+  }
+
+  absl::StatusOr<float> EstimateScale(Eigen::Matrix3Xf& landmarks) const {
+    Eigen::Matrix4f transform_mat;
+    MP_RETURN_IF_ERROR(procrustes_solver_->SolveWeightedOrthogonalProblem(
+        canonical_metric_landmarks_, landmarks, landmark_weights_,
+        transform_mat))
+        << "Failed to estimate canonical-to-runtime landmark set transform!";
+
+    return transform_mat.col(0).norm();
+  }
+
+  static void MoveAndRescaleZ(const PerspectiveCameraFrustum& pcf,
+                              float depth_offset, float scale,
+                              Eigen::Matrix3Xf& landmarks) {
+    landmarks.row(2) =
+        (landmarks.array().row(2) - depth_offset + pcf.near) / scale;
+  }
+
+  static void UnprojectXY(const PerspectiveCameraFrustum& pcf,
+                          Eigen::Matrix3Xf& landmarks) {
+    landmarks.row(0) =
+        landmarks.row(0).cwiseProduct(landmarks.row(2)) / pcf.near;
+    landmarks.row(1) =
+        landmarks.row(1).cwiseProduct(landmarks.row(2)) / pcf.near;
+  }
+
+  static void ChangeHandedness(Eigen::Matrix3Xf& landmarks) {
+    landmarks.row(2) *= -1.f;
+  }
+
+  static void ConvertLandmarkListToEigenMatrix(
+      const NormalizedLandmarkList& landmark_list,
+      Eigen::Matrix3Xf& eigen_matrix) {
+    eigen_matrix = Eigen::Matrix3Xf(3, landmark_list.landmark_size());
+    for (int i = 0; i < landmark_list.landmark_size(); ++i) {
+      const auto& landmark = landmark_list.landmark(i);
+      eigen_matrix(0, i) = landmark.x();
+      eigen_matrix(1, i) = landmark.y();
+      eigen_matrix(2, i) = landmark.z();
+    }
+  }
+
+  static void ConvertEigenMatrixToLandmarkList(
+      const Eigen::Matrix3Xf& eigen_matrix, LandmarkList& landmark_list) {
+    landmark_list.Clear();
+
+    for (int i = 0; i < eigen_matrix.cols(); ++i) {
+      auto& landmark = *landmark_list.add_landmark();
+      landmark.set_x(eigen_matrix(0, i));
+      landmark.set_y(eigen_matrix(1, i));
+      landmark.set_z(eigen_matrix(2, i));
+    }
+  }
+
+  const OriginPointLocation origin_point_location_;
+  const InputSource input_source_;
+  Eigen::Matrix3Xf canonical_metric_landmarks_;
+  Eigen::VectorXf landmark_weights_;
+
+  std::unique_ptr<ProcrustesSolver> procrustes_solver_;
+};
+
+class GeometryPipelineImpl : public GeometryPipeline {
+ public:
+  GeometryPipelineImpl(
+      const PerspectiveCamera& perspective_camera,  //
+      const Mesh3d& canonical_mesh,                 //
+      uint32_t canonical_mesh_vertex_size,          //
+      uint32_t canonical_mesh_num_vertices,
+      uint32_t canonical_mesh_vertex_position_offset,
+      std::unique_ptr<ScreenToMetricSpaceConverter> space_converter)
+      : perspective_camera_(perspective_camera),
+        canonical_mesh_(canonical_mesh),
+        canonical_mesh_vertex_size_(canonical_mesh_vertex_size),
+        canonical_mesh_num_vertices_(canonical_mesh_num_vertices),
+        canonical_mesh_vertex_position_offset_(
+            canonical_mesh_vertex_position_offset),
+        space_converter_(std::move(space_converter)) {}
+
+  absl::StatusOr<std::vector<FaceGeometry>> EstimateFaceGeometry(
+      const std::vector<NormalizedLandmarkList>& multi_face_landmarks,
+      int frame_width, int frame_height) const override {
+    MP_RETURN_IF_ERROR(ValidateFrameDimensions(frame_width, frame_height))
+        << "Invalid frame dimensions!";
+
+    // Create a perspective camera frustum to be shared for geometry
+    // estimation per each face.
+    PerspectiveCameraFrustum pcf(perspective_camera_, frame_width,
+                                 frame_height);
+
+    std::vector<FaceGeometry> multi_face_geometry;
+
+    // From this point, the meaning of "face landmarks" is clarified further
+    // as "screen face landmarks". This is done to distinguish them from
+    // "metric face landmarks" that are derived during the face geometry
+    // estimation process.
+    for (const NormalizedLandmarkList& screen_face_landmarks :
+         multi_face_landmarks) {
+      // Having a too compact screen landmark list will result in numerical
+      // instabilities, therefore such faces are filtered.
+      if (IsScreenLandmarkListTooCompact(screen_face_landmarks)) {
+        continue;
+      }
+
+      // Convert the screen landmarks into the metric landmarks and get the
+      // pose transformation matrix.
+      LandmarkList metric_face_landmarks;
+      Eigen::Matrix4f pose_transform_mat;
+      MP_RETURN_IF_ERROR(space_converter_->Convert(screen_face_landmarks, pcf,
+                                                   metric_face_landmarks,
+                                                   pose_transform_mat))
+          << "Failed to convert landmarks from the screen to the metric "
+             "space!";
+
+      // Pack geometry data for this face.
+      FaceGeometry face_geometry;
+      Mesh3d* mutable_mesh = face_geometry.mutable_mesh();
+      // Copy the canonical face mesh as the face geometry mesh.
+      mutable_mesh->CopyFrom(canonical_mesh_);
+      // Replace XYZ vertex mesh coordinates with the metric landmark
+      // positions.
+      for (int i = 0; i < canonical_mesh_num_vertices_; ++i) {
+        uint32_t vertex_buffer_offset =
+            canonical_mesh_vertex_size_ * i +
+            canonical_mesh_vertex_position_offset_;
+
+        mutable_mesh->set_vertex_buffer(vertex_buffer_offset,
+                                        metric_face_landmarks.landmark(i).x());
+        mutable_mesh->set_vertex_buffer(vertex_buffer_offset + 1,
+                                        metric_face_landmarks.landmark(i).y());
+        mutable_mesh->set_vertex_buffer(vertex_buffer_offset + 2,
+                                        metric_face_landmarks.landmark(i).z());
+      }
+      // Populate the face pose transformation matrix.
+      mediapipe::MatrixDataProtoFromMatrix(
+          pose_transform_mat, face_geometry.mutable_pose_transform_matrix());
+
+      multi_face_geometry.push_back(face_geometry);
+    }
+
+    return multi_face_geometry;
+  }
+
+ private:
+  static bool IsScreenLandmarkListTooCompact(
+      const NormalizedLandmarkList& screen_landmarks) {
+    float mean_x = 0.f;
+    float mean_y = 0.f;
+    for (int i = 0; i < screen_landmarks.landmark_size(); ++i) {
+      const auto& landmark = screen_landmarks.landmark(i);
+      mean_x += (landmark.x() - mean_x) / static_cast<float>(i + 1);
+      mean_y += (landmark.y() - mean_y) / static_cast<float>(i + 1);
+    }
+
+    float max_sq_dist = 0.f;
+    for (const auto& landmark : screen_landmarks.landmark()) {
+      const float d_x = landmark.x() - mean_x;
+      const float d_y = landmark.y() - mean_y;
+      max_sq_dist = std::max(max_sq_dist, d_x * d_x + d_y * d_y);
+    }
+
+    static constexpr float kIsScreenLandmarkListTooCompactThreshold = 1e-3f;
+    return std::sqrt(max_sq_dist) <= kIsScreenLandmarkListTooCompactThreshold;
+  }
+
+  const PerspectiveCamera perspective_camera_;
+  const Mesh3d canonical_mesh_;
+  const uint32_t canonical_mesh_vertex_size_;
+  const uint32_t canonical_mesh_num_vertices_;
+  const uint32_t canonical_mesh_vertex_position_offset_;
+
+  std::unique_ptr<ScreenToMetricSpaceConverter> space_converter_;
+};
+
+}  // namespace
+
+absl::StatusOr<std::unique_ptr<GeometryPipeline>> CreateGeometryPipeline(
+    const Environment& environment, const GeometryPipelineMetadata& metadata) {
+  MP_RETURN_IF_ERROR(ValidateEnvironment(environment))
+      << "Invalid environment!";
+  MP_RETURN_IF_ERROR(ValidateGeometryPipelineMetadata(metadata))
+      << "Invalid geometry pipeline metadata!";
+
+  const auto& canonical_mesh = metadata.canonical_mesh();
+  RET_CHECK(HasVertexComponent(canonical_mesh.vertex_type(),
+                               VertexComponent::POSITION))
+      << "Canonical face mesh must have the `POSITION` vertex component!";
+  RET_CHECK(HasVertexComponent(canonical_mesh.vertex_type(),
+                               VertexComponent::TEX_COORD))
+      << "Canonical face mesh must have the `TEX_COORD` vertex component!";
+
+  uint32_t canonical_mesh_vertex_size =
+      GetVertexSize(canonical_mesh.vertex_type());
+  uint32_t canonical_mesh_num_vertices =
+      canonical_mesh.vertex_buffer_size() / canonical_mesh_vertex_size;
+  uint32_t canonical_mesh_vertex_position_offset =
+      GetVertexComponentOffset(canonical_mesh.vertex_type(),
+                               VertexComponent::POSITION)
+          .value();
+
+  // Put the Procrustes landmark basis into Eigen matrices for easier access.
+  Eigen::Matrix3Xf canonical_metric_landmarks =
+      Eigen::Matrix3Xf::Zero(3, canonical_mesh_num_vertices);
+  Eigen::VectorXf landmark_weights =
+      Eigen::VectorXf::Zero(canonical_mesh_num_vertices);
+
+  for (int i = 0; i < canonical_mesh_num_vertices; ++i) {
+    uint32_t vertex_buffer_offset =
+        canonical_mesh_vertex_size * i + canonical_mesh_vertex_position_offset;
+
+    canonical_metric_landmarks(0, i) =
+        canonical_mesh.vertex_buffer(vertex_buffer_offset);
+    canonical_metric_landmarks(1, i) =
+        canonical_mesh.vertex_buffer(vertex_buffer_offset + 1);
+    canonical_metric_landmarks(2, i) =
+        canonical_mesh.vertex_buffer(vertex_buffer_offset + 2);
+  }
+
+  for (const WeightedLandmarkRef& wlr : metadata.procrustes_landmark_basis()) {
+    uint32_t landmark_id = wlr.landmark_id();
+    landmark_weights(landmark_id) = wlr.weight();
+  }
+
+  std::unique_ptr<GeometryPipeline> result =
+      absl::make_unique<GeometryPipelineImpl>(
+          environment.perspective_camera(), canonical_mesh,
+          canonical_mesh_vertex_size, canonical_mesh_num_vertices,
+          canonical_mesh_vertex_position_offset,
+          absl::make_unique<ScreenToMetricSpaceConverter>(
+              environment.origin_point_location(),
+              metadata.input_source() == InputSource::DEFAULT
+                  ? InputSource::FACE_LANDMARK_PIPELINE
+                  : metadata.input_source(),
+              std::move(canonical_metric_landmarks),
+              std::move(landmark_weights),
+              CreateFloatPrecisionProcrustesSolver()));
+
+  return result;
+}
+
+}  // namespace mediapipe::face_geometry
diff --git a/mediapipe/modules/face_geometry/libs/geometry_pipeline.h b/mediapipe/modules/face_geometry/libs/geometry_pipeline.h
new file mode 100644
index 0000000..ffa779c
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/geometry_pipeline.h
@@ -0,0 +1,67 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FACE_GEOMETRY_LIBS_GEOMETRY_PIPELINE_H_
+#define MEDIAPIPE_FACE_GEOMETRY_LIBS_GEOMETRY_PIPELINE_H_
+
+#include <memory>
+#include <vector>
+
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/port/statusor.h"
+#include "mediapipe/modules/face_geometry/protos/environment.pb.h"
+#include "mediapipe/modules/face_geometry/protos/face_geometry.pb.h"
+#include "mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.pb.h"
+
+namespace mediapipe::face_geometry {
+
+// Encapsulates a stateless estimator of facial geometry in a Metric space
+// based on the normalized face landmarks in the Screen space.
+class GeometryPipeline {
+ public:
+  virtual ~GeometryPipeline() = default;
+
+  // Estimates geometry data for multiple faces.
+  //
+  // Returns an error status if any of the passed arguments is invalid.
+  //
+  // The result includes face geometry data for a subset of the input faces,
+  // however geometry data for some faces might be missing. This may happen if
+  // it'd be unstable to estimate the facial geometry based on a corresponding
+  // face landmark list for any reason (for example, if the landmark list is
+  // too compact).
+  //
+  // Each face landmark list must have the same number of landmarks as was
+  // passed upon initialization via the canonical face mesh (as a part of the
+  // geometry pipeline metadata).
+  //
+  // Both `frame_width` and `frame_height` must be positive.
+  virtual absl::StatusOr<std::vector<FaceGeometry>> EstimateFaceGeometry(
+      const std::vector<NormalizedLandmarkList>& multi_face_landmarks,
+      int frame_width, int frame_height) const = 0;
+};
+
+// Creates an instance of `GeometryPipeline`.
+//
+// Both `environment` and `metadata` must be valid (for details, please refer
+// to the proto message definition comments and/or `validation_utils.h/cc`).
+//
+// Canonical face mesh (defined as a part of `metadata`) must have the
+// `POSITION` and the `TEX_COORD` vertex components.
+absl::StatusOr<std::unique_ptr<GeometryPipeline>> CreateGeometryPipeline(
+    const Environment& environment, const GeometryPipelineMetadata& metadata);
+
+}  // namespace mediapipe::face_geometry
+
+#endif  // MEDIAPIPE_FACE_GEOMETRY_LIBS_GEOMETRY_PIPELINE_H_
diff --git a/mediapipe/modules/face_geometry/libs/mesh_3d_utils.cc b/mediapipe/modules/face_geometry/libs/mesh_3d_utils.cc
new file mode 100644
index 0000000..2078ec6
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/mesh_3d_utils.cc
@@ -0,0 +1,103 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
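As with the effect renderer, here is a short hedged sketch of how the `GeometryPipeline` interface above might be exercised. The environment and metadata protos are assumed to be loaded elsewhere, and MediaPipe's status macros are assumed to be available; only `CreateGeometryPipeline()` and `EstimateFaceGeometry()` come from the header.

```cpp
// Sketch: estimate face geometry for one frame of normalized landmarks.
absl::Status EstimateOneFrame(
    const mediapipe::face_geometry::Environment& environment,
    const mediapipe::face_geometry::GeometryPipelineMetadata& metadata,
    const std::vector<mediapipe::NormalizedLandmarkList>& multi_face_landmarks,
    int frame_width, int frame_height) {
  namespace fg = mediapipe::face_geometry;

  // The pipeline is stateless; it can be created once and reused per frame.
  ASSIGN_OR_RETURN(std::unique_ptr<fg::GeometryPipeline> pipeline,
                   fg::CreateGeometryPipeline(environment, metadata));

  ASSIGN_OR_RETURN(std::vector<fg::FaceGeometry> multi_face_geometry,
                   pipeline->EstimateFaceGeometry(multi_face_landmarks,
                                                  frame_width, frame_height));

  // The result may contain fewer entries than the input if some landmark
  // lists were too compact for a stable estimate.
  return absl::OkStatus();
}
```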
+ +#include "mediapipe/modules/face_geometry/libs/mesh_3d_utils.h" + +#include +#include + +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/statusor.h" +#include "mediapipe/modules/face_geometry/protos/mesh_3d.pb.h" + +namespace mediapipe::face_geometry { +namespace { + +bool HasVertexComponentVertexPT(VertexComponent vertex_component) { + switch (vertex_component) { + case VertexComponent::POSITION: + case VertexComponent::TEX_COORD: + return true; + + default: + return false; + } +} + +uint32_t GetVertexComponentSizeVertexPT(VertexComponent vertex_component) { + switch (vertex_component) { + case VertexComponent::POSITION: + return 3; + case VertexComponent::TEX_COORD: + return 2; + } +} + +uint32_t GetVertexComponentOffsetVertexPT(VertexComponent vertex_component) { + switch (vertex_component) { + case VertexComponent::POSITION: + return 0; + case VertexComponent::TEX_COORD: + return GetVertexComponentSizeVertexPT(VertexComponent::POSITION); + } +} + +} // namespace + +std::size_t GetVertexSize(Mesh3d::VertexType vertex_type) { + switch (vertex_type) { + case Mesh3d::VERTEX_PT: + return GetVertexComponentSizeVertexPT(VertexComponent::POSITION) + + GetVertexComponentSizeVertexPT(VertexComponent::TEX_COORD); + } +} + +std::size_t GetPrimitiveSize(Mesh3d::PrimitiveType primitive_type) { + switch (primitive_type) { + case Mesh3d::TRIANGLE: + return 3; + } +} + +bool HasVertexComponent(Mesh3d::VertexType vertex_type, + VertexComponent vertex_component) { + switch (vertex_type) { + case Mesh3d::VERTEX_PT: + return HasVertexComponentVertexPT(vertex_component); + } +} + +absl::StatusOr GetVertexComponentOffset( + Mesh3d::VertexType vertex_type, VertexComponent vertex_component) { + RET_CHECK(HasVertexComponentVertexPT(vertex_component)) + << "A given vertex type doesn't have the requested component!"; + + switch (vertex_type) { + case Mesh3d::VERTEX_PT: + return GetVertexComponentOffsetVertexPT(vertex_component); + } +} + +absl::StatusOr GetVertexComponentSize( + Mesh3d::VertexType vertex_type, VertexComponent vertex_component) { + RET_CHECK(HasVertexComponentVertexPT(vertex_component)) + << "A given vertex type doesn't have the requested component!"; + + switch (vertex_type) { + case Mesh3d::VERTEX_PT: + return GetVertexComponentSizeVertexPT(vertex_component); + } +} + +} // namespace mediapipe::face_geometry diff --git a/mediapipe/modules/face_geometry/libs/mesh_3d_utils.h b/mediapipe/modules/face_geometry/libs/mesh_3d_utils.h new file mode 100644 index 0000000..a320aae --- /dev/null +++ b/mediapipe/modules/face_geometry/libs/mesh_3d_utils.h @@ -0,0 +1,51 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+#ifndef MEDIAPIPE_FACE_GEOMETRY_LIBS_MESH_3D_UTILS_H_
+#define MEDIAPIPE_FACE_GEOMETRY_LIBS_MESH_3D_UTILS_H_
+
+#include <cstdint>
+#include <cstdlib>
+
+#include "mediapipe/framework/port/statusor.h"
+#include "mediapipe/modules/face_geometry/protos/mesh_3d.pb.h"
+
+namespace mediapipe::face_geometry {
+
+enum class VertexComponent { POSITION, TEX_COORD };
+
+std::size_t GetVertexSize(Mesh3d::VertexType vertex_type);
+
+std::size_t GetPrimitiveSize(Mesh3d::PrimitiveType primitive_type);
+
+bool HasVertexComponent(Mesh3d::VertexType vertex_type,
+                        VertexComponent vertex_component);
+
+// Computes the vertex component offset.
+//
+// Returns an error status if a given vertex type doesn't have the requested
+// component.
+absl::StatusOr<uint32_t> GetVertexComponentOffset(
+    Mesh3d::VertexType vertex_type, VertexComponent vertex_component);
+
+// Computes the vertex component size.
+//
+// Returns an error status if a given vertex type doesn't have the requested
+// component.
+absl::StatusOr<uint32_t> GetVertexComponentSize(
+    Mesh3d::VertexType vertex_type, VertexComponent vertex_component);
+
+}  // namespace mediapipe::face_geometry
+
+#endif  // MEDIAPIPE_FACE_GEOMETRY_LIBS_MESH_3D_UTILS_H_
diff --git a/mediapipe/modules/face_geometry/libs/procrustes_solver.cc b/mediapipe/modules/face_geometry/libs/procrustes_solver.cc
new file mode 100644
index 0000000..2ffae0e
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/procrustes_solver.cc
@@ -0,0 +1,266 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/modules/face_geometry/libs/procrustes_solver.h"
+
+#include <cmath>
+#include <memory>
+
+#include "Eigen/Dense"
+#include "absl/memory/memory.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/framework/port/statusor.h"
+
+namespace mediapipe {
+namespace face_geometry {
+namespace {
+
+class FloatPrecisionProcrustesSolver : public ProcrustesSolver {
+ public:
+  FloatPrecisionProcrustesSolver() = default;
+
+  absl::Status SolveWeightedOrthogonalProblem(
+      const Eigen::Matrix3Xf& source_points,  //
+      const Eigen::Matrix3Xf& target_points,  //
+      const Eigen::VectorXf& point_weights,
+      Eigen::Matrix4f& transform_mat) const override {
+    // Validate inputs.
+    MP_RETURN_IF_ERROR(ValidateInputPoints(source_points, target_points))
+        << "Failed to validate weighted orthogonal problem input points!";
+    MP_RETURN_IF_ERROR(
+        ValidatePointWeights(source_points.cols(), point_weights))
+        << "Failed to validate weighted orthogonal problem point weights!";
+
+    // Extract square root from the point weights.
+    Eigen::VectorXf sqrt_weights = ExtractSquareRoot(point_weights);
+
+    // Try to solve the WEOP problem.
+    MP_RETURN_IF_ERROR(InternalSolveWeightedOrthogonalProblem(
+        source_points, target_points, sqrt_weights, transform_mat))
+        << "Failed to solve the WEOP problem!";
+
+    return absl::OkStatus();
+  }
+
+ private:
+  static constexpr float kAbsoluteErrorEps = 1e-9f;
+
+  static absl::Status ValidateInputPoints(
+      const Eigen::Matrix3Xf& source_points,
+      const Eigen::Matrix3Xf& target_points) {
+    RET_CHECK_GT(source_points.cols(), 0)
+        << "The number of source points must be positive!";
+
+    RET_CHECK_EQ(source_points.cols(), target_points.cols())
+        << "The number of source and target points must be equal!";
+
+    return absl::OkStatus();
+  }
+
+  static absl::Status ValidatePointWeights(
+      int num_points, const Eigen::VectorXf& point_weights) {
+    RET_CHECK_GT(point_weights.size(), 0)
+        << "The number of point weights must be positive!";
+
+    RET_CHECK_EQ(point_weights.size(), num_points)
+        << "The number of points and point weights must be equal!";
+
+    float total_weight = 0.f;
+    for (int i = 0; i < num_points; ++i) {
+      RET_CHECK_GE(point_weights(i), 0.f)
+          << "Each point weight must be non-negative!";
+
+      total_weight += point_weights(i);
+    }
+
+    RET_CHECK_GT(total_weight, kAbsoluteErrorEps)
+        << "The total point weight is too small!";
+
+    return absl::OkStatus();
+  }
+
+  static Eigen::VectorXf ExtractSquareRoot(
+      const Eigen::VectorXf& point_weights) {
+    Eigen::VectorXf sqrt_weights(point_weights);
+    for (int i = 0; i < sqrt_weights.size(); ++i) {
+      sqrt_weights(i) = std::sqrt(sqrt_weights(i));
+    }
+
+    return sqrt_weights;
+  }
+
+  // Combines a 3x3 rotation-and-scale matrix and a 3x1 translation vector
+  // into a single 4x4 transformation matrix.
+  static Eigen::Matrix4f CombineTransformMatrix(const Eigen::Matrix3f& r_and_s,
+                                                const Eigen::Vector3f& t) {
+    Eigen::Matrix4f result = Eigen::Matrix4f::Identity();
+    result.leftCols(3).topRows(3) = r_and_s;
+    result.col(3).topRows(3) = t;
+
+    return result;
+  }
+
+  // The weighted problem is thoroughly addressed in Section 2.4 of:
+  // D. Akca, Generalized Procrustes analysis and its applications
+  // in photogrammetry, 2003, https://doi.org/10.3929/ethz-a-004656648
+  //
+  // Notable differences in the code presented here are:
+  //
+  // * In the paper, the weights matrix W_p is Cholesky-decomposed as Q^T Q.
+  //   Our W_p is diagonal (equal to diag(sqrt_weights^2)),
+  //   so we can just set Q = diag(sqrt_weights) instead.
+  //
+  // * In the paper, the problem is presented as
+  //   (for W_k = I and W_p = transposed(Q) Q):
+  //   || Q (c A T + j transposed(t) - B) || -> min.
+  //
+  //   We reformulate it as an equivalent minimization of the transpose's
+  //   norm:
+  //   || (c transposed(T) transposed(A) - transposed(B)) transposed(Q) || ->
+  //   min, where transposed(A) and transposed(B) are the source and the
+  //   target point clouds, respectively, c transposed(T) is the
+  //   rotation+scaling R sought for, and Q is diag(sqrt_weights).
+  //
+  // Most of the derivations are therefore transposed.
+  //
+  // Note: the output `transform_mat` argument is used instead of `StatusOr<>`
+  // return type in order to avoid Eigen memory alignment issues. Details:
+  // https://eigen.tuxfamily.org/dox/group__TopicStructHavingEigenMembers.html
+  static absl::Status InternalSolveWeightedOrthogonalProblem(
+      const Eigen::Matrix3Xf& sources, const Eigen::Matrix3Xf& targets,
+      const Eigen::VectorXf& sqrt_weights, Eigen::Matrix4f& transform_mat) {
+    // transposed(A_w).
+    Eigen::Matrix3Xf weighted_sources =
+        sources.array().rowwise() * sqrt_weights.array().transpose();
+    // transposed(B_w).
+    Eigen::Matrix3Xf weighted_targets =
+        targets.array().rowwise() * sqrt_weights.array().transpose();
+
+    // w = transposed(j_w) j_w.
+    float total_weight = sqrt_weights.cwiseProduct(sqrt_weights).sum();
+
+    // Let C = (j_w transposed(j_w)) / (transposed(j_w) j_w).
+    // Note that C = transposed(C), hence (I - C) = transposed(I - C).
+    //
+    // transposed(A_w) C = transposed(A_w) j_w transposed(j_w) / w =
+    // (transposed(A_w) j_w) transposed(j_w) / w = c_w transposed(j_w),
+    //
+    // where c_w = transposed(A_w) j_w / w is a k x 1 vector calculated here:
+    Eigen::Matrix3Xf twice_weighted_sources =
+        weighted_sources.array().rowwise() * sqrt_weights.array().transpose();
+    Eigen::Vector3f source_center_of_mass =
+        twice_weighted_sources.rowwise().sum() / total_weight;
+    // transposed((I - C) A_w) = transposed(A_w) (I - C) =
+    // transposed(A_w) - transposed(A_w) C = transposed(A_w) - c_w transposed(j_w).
+    Eigen::Matrix3Xf centered_weighted_sources =
+        weighted_sources - source_center_of_mass * sqrt_weights.transpose();
+
+    Eigen::Matrix3f rotation;
+    MP_RETURN_IF_ERROR(ComputeOptimalRotation(
+        weighted_targets * centered_weighted_sources.transpose(), rotation))
+        << "Failed to compute the optimal rotation!";
+    ASSIGN_OR_RETURN(
+        float scale,
+        ComputeOptimalScale(centered_weighted_sources, weighted_sources,
+                            weighted_targets, rotation),
+        _ << "Failed to compute the optimal scale!");
+
+    // R = c transposed(T).
+    Eigen::Matrix3f rotation_and_scale = scale * rotation;
+
+    // Compute optimal translation for the weighted problem.
+
+    // transposed(B_w - c A_w T) = transposed(B_w) - R transposed(A_w) in (54).
+    const auto pointwise_diffs =
+        weighted_targets - rotation_and_scale * weighted_sources;
+    // Multiplication by j_w is a respectively weighted column sum.
+    // (54) from the paper.
+    const auto weighted_pointwise_diffs =
+        pointwise_diffs.array().rowwise() * sqrt_weights.array().transpose();
+    Eigen::Vector3f translation =
+        weighted_pointwise_diffs.rowwise().sum() / total_weight;
+
+    transform_mat = CombineTransformMatrix(rotation_and_scale, translation);
+
+    return absl::OkStatus();
+  }
+
+  // `design_matrix` is a transposed LHS of (51) in the paper.
+  //
+  // Note: the output `rotation` argument is used instead of `StatusOr<>`
+  // return type in order to avoid Eigen memory alignment issues. Details:
+  // https://eigen.tuxfamily.org/dox/group__TopicStructHavingEigenMembers.html
+  static absl::Status ComputeOptimalRotation(
+      const Eigen::Matrix3f& design_matrix, Eigen::Matrix3f& rotation) {
+    RET_CHECK_GT(design_matrix.norm(), kAbsoluteErrorEps)
+        << "Design matrix norm is too small!";
+
+    Eigen::JacobiSVD<Eigen::Matrix3f> svd(
+        design_matrix, Eigen::ComputeFullU | Eigen::ComputeFullV);
+
+    Eigen::Matrix3f postrotation = svd.matrixU();
+    Eigen::Matrix3f prerotation = svd.matrixV().transpose();
+
+    // Disallow reflection by ensuring that det(`rotation`) = +1 (and not -1),
+    // see "4.6 Constrained orthogonal Procrustes problems"
+    // in the Gower & Dijksterhuis's book "Procrustes Analysis".
+    // We flip the sign of the least singular value along with a column in W.
+    //
+    // Note that now the sum of singular values doesn't work for scale
+    // estimation due to this sign flip.
+    if (postrotation.determinant() * prerotation.determinant() <
+        static_cast<float>(0)) {
+      postrotation.col(2) *= static_cast<float>(-1);
+    }
+
+    // Transposed (52) from the paper.
+    rotation = postrotation * prerotation;
+    return absl::OkStatus();
+  }
+
+  static absl::StatusOr<float> ComputeOptimalScale(
+      const Eigen::Matrix3Xf& centered_weighted_sources,
+      const Eigen::Matrix3Xf& weighted_sources,
+      const Eigen::Matrix3Xf& weighted_targets,
+      const Eigen::Matrix3f& rotation) {
+    // transposed(T) transposed(A_w) (I - C).
+    const auto rotated_centered_weighted_sources =
+        rotation * centered_weighted_sources;
+    // Use the identity trace(A B) = sum(A * B^T)
+    // to avoid building large intermediate matrices (* is Hadamard product).
+    // (53) from the paper.
+    float numerator =
+        rotated_centered_weighted_sources.cwiseProduct(weighted_targets).sum();
+    float denominator =
+        centered_weighted_sources.cwiseProduct(weighted_sources).sum();
+
+    RET_CHECK_GT(denominator, kAbsoluteErrorEps)
+        << "Scale expression denominator is too small!";
+    RET_CHECK_GT(numerator / denominator, kAbsoluteErrorEps)
+        << "Scale is too small!";
+
+    return numerator / denominator;
+  }
+};
+
+}  // namespace
+
+std::unique_ptr<ProcrustesSolver> CreateFloatPrecisionProcrustesSolver() {
+  return absl::make_unique<FloatPrecisionProcrustesSolver>();
+}
+
+}  // namespace face_geometry
+}  // namespace mediapipe
diff --git a/mediapipe/modules/face_geometry/libs/procrustes_solver.h b/mediapipe/modules/face_geometry/libs/procrustes_solver.h
new file mode 100644
index 0000000..c34b8f6
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/procrustes_solver.h
@@ -0,0 +1,70 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FACE_GEOMETRY_LIBS_PROCRUSTES_SOLVER_H_
+#define MEDIAPIPE_FACE_GEOMETRY_LIBS_PROCRUSTES_SOLVER_H_
+
+#include <memory>
+
+#include "Eigen/Dense"
+#include "mediapipe/framework/port/status.h"
+
+namespace mediapipe::face_geometry {
+
+// Encapsulates a stateless solver for the Weighted Extended Orthogonal
+// Procrustes (WEOP) Problem, as defined in Section 2.4 of
+// https://doi.org/10.3929/ethz-a-004656648.
+//
+// Given the source and the target point clouds, the algorithm estimates
+// a 4x4 transformation matrix featuring the following semantic components:
+//
+//   * Uniform scale
+//   * Rotation
+//   * Translation
+//
+// The matrix maps the source point cloud into the target point cloud
+// minimizing the Mean Squared Error.
+class ProcrustesSolver {
+ public:
+  virtual ~ProcrustesSolver() = default;
+
+  // Solves the Weighted Extended Orthogonal Procrustes (WEOP) Problem.
+  //
+  // All `source_points`, `target_points` and `point_weights` must define the
+  // same number of points. Elements of `point_weights` must be non-negative.
+  //
+  // A too small diameter of either of the point clouds will likely lead to
+  // numerical instabilities and failure to estimate the transformation.
+  //
+  // A too small point cloud total weight will likely lead to numerical
+  // instabilities and failure to estimate the transformation too.
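+  // For instance, a `point_weights` vector whose entries sum to less than
+  // ~1e-9 is rejected outright by the float-precision implementation's input
+  // validation.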
+  //
+  // Small point coordinate deviation for either of the point clouds will
+  // likely result in a failure as it will make the solution very unstable,
+  // if solvable at all.
+  //
+  // Note: the output `transform_mat` argument is used instead of `StatusOr<>`
+  // return type in order to avoid Eigen memory alignment issues. Details:
+  // https://eigen.tuxfamily.org/dox/group__TopicStructHavingEigenMembers.html
+  virtual absl::Status SolveWeightedOrthogonalProblem(
+      const Eigen::Matrix3Xf& source_points,  //
+      const Eigen::Matrix3Xf& target_points,  //
+      const Eigen::VectorXf& point_weights,   //
+      Eigen::Matrix4f& transform_mat) const = 0;
+};
+
+std::unique_ptr<ProcrustesSolver> CreateFloatPrecisionProcrustesSolver();
+
+}  // namespace mediapipe::face_geometry
+
+#endif  // MEDIAPIPE_FACE_GEOMETRY_LIBS_PROCRUSTES_SOLVER_H_
diff --git a/mediapipe/modules/face_geometry/libs/validation_utils.cc b/mediapipe/modules/face_geometry/libs/validation_utils.cc
new file mode 100644
index 0000000..eb4fd08
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/validation_utils.cc
@@ -0,0 +1,126 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/modules/face_geometry/libs/validation_utils.h"
+
+#include <cstddef>
+#include <cstdint>
+
+#include "mediapipe/framework/formats/matrix_data.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/modules/face_geometry/libs/mesh_3d_utils.h"
+#include "mediapipe/modules/face_geometry/protos/environment.pb.h"
+#include "mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.pb.h"
+#include "mediapipe/modules/face_geometry/protos/mesh_3d.pb.h"
+
+namespace mediapipe::face_geometry {
+
+absl::Status ValidatePerspectiveCamera(
+    const PerspectiveCamera& perspective_camera) {
+  static constexpr float kAbsoluteErrorEps = 1e-9f;
+
+  RET_CHECK_GT(perspective_camera.near(), kAbsoluteErrorEps)
+      << "Near Z must be greater than 0 with a margin of 10^{-9}!";
+
+  RET_CHECK_GT(perspective_camera.far(),
+               perspective_camera.near() + kAbsoluteErrorEps)
+      << "Far Z must be greater than Near Z with a margin of 10^{-9}!";
+
+  RET_CHECK_GT(perspective_camera.vertical_fov_degrees(), kAbsoluteErrorEps)
+      << "Vertical FOV must be positive with a margin of 10^{-9}!";
+
+  RET_CHECK_LT(perspective_camera.vertical_fov_degrees() + kAbsoluteErrorEps,
+               180.f)
+      << "Vertical FOV must be less than 180 degrees with a margin of 10^{-9}";
+
+  return absl::OkStatus();
+}
+
+absl::Status ValidateEnvironment(const Environment& environment) {
+  MP_RETURN_IF_ERROR(
+      ValidatePerspectiveCamera(environment.perspective_camera()))
+      << "Invalid perspective camera!";
+
+  return absl::OkStatus();
+}
+
+absl::Status ValidateMesh3d(const Mesh3d& mesh_3d) {
+  const std::size_t vertex_size = GetVertexSize(mesh_3d.vertex_type());
+  const std::size_t primitive_size =
+      GetPrimitiveSize(mesh_3d.primitive_type());
+
+  RET_CHECK_EQ(mesh_3d.vertex_buffer_size() % vertex_size, 0)
+      << "Vertex buffer size must be a multiple of the vertex size!";
+
+  RET_CHECK_EQ(mesh_3d.index_buffer_size() % primitive_size, 0)
+      << "Index buffer size must be a multiple of the primitive size!";
+
+  const int num_vertices = mesh_3d.vertex_buffer_size() / vertex_size;
+  for (uint32_t idx : mesh_3d.index_buffer()) {
+    RET_CHECK_LT(idx, num_vertices)
+        << "All mesh indices must refer to an existing vertex!";
+  }
+
+  return absl::OkStatus();
+}
+
+absl::Status ValidateFaceGeometry(const FaceGeometry& face_geometry) {
+  MP_RETURN_IF_ERROR(ValidateMesh3d(face_geometry.mesh())) << "Invalid mesh!";
+
+  static constexpr char kInvalid4x4MatrixMessage[] =
+      "Pose transformation matrix must be a 4x4 matrix!";
+
+  const MatrixData& pose_transform_matrix =
+      face_geometry.pose_transform_matrix();
+  RET_CHECK_EQ(pose_transform_matrix.rows(), 4) << kInvalid4x4MatrixMessage;
+  RET_CHECK_EQ(pose_transform_matrix.cols(), 4) << kInvalid4x4MatrixMessage;
+  RET_CHECK_EQ(pose_transform_matrix.packed_data_size(), 16)
+      << kInvalid4x4MatrixMessage;
+
+  return absl::OkStatus();
+}
+
+absl::Status ValidateGeometryPipelineMetadata(
+    const GeometryPipelineMetadata& metadata) {
+  MP_RETURN_IF_ERROR(ValidateMesh3d(metadata.canonical_mesh()))
+      << "Invalid canonical mesh!";
+
+  RET_CHECK_GT(metadata.procrustes_landmark_basis_size(), 0)
+      << "Procrustes landmark basis must be non-empty!";
+
+  const int num_vertices =
+      metadata.canonical_mesh().vertex_buffer_size() /
+      GetVertexSize(metadata.canonical_mesh().vertex_type());
+  for (const WeightedLandmarkRef& wlr : metadata.procrustes_landmark_basis()) {
+    RET_CHECK_LT(wlr.landmark_id(), num_vertices)
+        << "All Procrustes basis indices must refer to an existing canonical "
+           "mesh vertex!";
+
+    RET_CHECK_GE(wlr.weight(), 0.f)
+        << "All Procrustes basis landmarks must have a non-negative weight!";
+  }
+
+  return absl::OkStatus();
+}
+
+absl::Status ValidateFrameDimensions(int frame_width, int frame_height) {
+  RET_CHECK_GT(frame_width, 0) << "Frame width must be positive!";
+  RET_CHECK_GT(frame_height, 0) << "Frame height must be positive!";
+
+  return absl::OkStatus();
+}
+
+}  // namespace mediapipe::face_geometry
diff --git a/mediapipe/modules/face_geometry/libs/validation_utils.h b/mediapipe/modules/face_geometry/libs/validation_utils.h
new file mode 100644
index 0000000..c0a7e08
--- /dev/null
+++ b/mediapipe/modules/face_geometry/libs/validation_utils.h
@@ -0,0 +1,70 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FACE_GEOMETRY_LIBS_VALIDATION_UTILS_H_
+#define MEDIAPIPE_FACE_GEOMETRY_LIBS_VALIDATION_UTILS_H_
+
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/modules/face_geometry/protos/environment.pb.h"
+#include "mediapipe/modules/face_geometry/protos/face_geometry.pb.h"
+#include "mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.pb.h"
+#include "mediapipe/modules/face_geometry/protos/mesh_3d.pb.h"
+
+namespace mediapipe::face_geometry {
+
+// Validates `perspective_camera`.
+//
+// Near Z must be greater than 0 with a margin of `1e-9`.
+// Far Z must be greater than Near Z with a margin of `1e-9`.
+// Vertical FOV must be in range (0, 180) with a margin of `1e-9` on the range
+// edges.
+absl::Status ValidatePerspectiveCamera(
+    const PerspectiveCamera& perspective_camera);
+
+// Validates `environment`.
+//
+// Environment's perspective camera must be valid.
+absl::Status ValidateEnvironment(const Environment& environment);
+
+// Validates `mesh_3d`.
+//
+// Mesh vertex buffer size must be a multiple of the vertex size.
+// Mesh index buffer size must be a multiple of the primitive size.
+// All mesh indices must reference an existing mesh vertex.
+absl::Status ValidateMesh3d(const Mesh3d& mesh_3d);
+
+// Validates `face_geometry`.
+//
+// Face mesh must be valid.
+// Face pose transformation matrix must be a 4x4 matrix.
+absl::Status ValidateFaceGeometry(const FaceGeometry& face_geometry);
+
+// Validates `metadata`.
+//
+// Canonical face mesh must be valid.
+// Procrustes landmark basis must be non-empty.
+// All Procrustes basis indices must reference an existing canonical mesh
+// vertex.
+// All Procrustes basis landmarks must have a non-negative weight.
+absl::Status ValidateGeometryPipelineMetadata(
+    const GeometryPipelineMetadata& metadata);
+
+// Validates frame dimensions.
+//
+// Both frame width and frame height must be positive.
+absl::Status ValidateFrameDimensions(int frame_width, int frame_height);
+
+}  // namespace mediapipe::face_geometry
+
+#endif  // MEDIAPIPE_FACE_GEOMETRY_LIBS_VALIDATION_UTILS_H_
diff --git a/mediapipe/modules/face_geometry/protos/BUILD b/mediapipe/modules/face_geometry/protos/BUILD
new file mode 100644
index 0000000..48b7b66
--- /dev/null
+++ b/mediapipe/modules/face_geometry/protos/BUILD
@@ -0,0 +1,46 @@
+# Copyright 2020 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
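+
+# Example build invocation for one of the proto targets defined below
+# (hypothetical command line, assuming a standard MediaPipe workspace root):
+#   bazel build //mediapipe/modules/face_geometry/protos:mesh_3d_proto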
+ +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_proto_library( + name = "environment_proto", + srcs = ["environment.proto"], +) + +mediapipe_proto_library( + name = "face_geometry_proto", + srcs = ["face_geometry.proto"], + deps = [ + ":mesh_3d_proto", + "//mediapipe/framework/formats:matrix_data_proto", + ], +) + +mediapipe_proto_library( + name = "geometry_pipeline_metadata_proto", + srcs = ["geometry_pipeline_metadata.proto"], + deps = [ + ":mesh_3d_proto", + ], +) + +mediapipe_proto_library( + name = "mesh_3d_proto", + srcs = ["mesh_3d.proto"], +) diff --git a/mediapipe/modules/face_geometry/protos/environment.proto b/mediapipe/modules/face_geometry/protos/environment.proto new file mode 100644 index 0000000..cca3f29 --- /dev/null +++ b/mediapipe/modules/face_geometry/protos/environment.proto @@ -0,0 +1,84 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe.face_geometry; + +option java_package = "com.google.mediapipe.modules.facegeometry"; +option java_outer_classname = "EnvironmentProto"; + +// Defines the (0, 0) origin point location of the environment. +// +// The variation in the origin point location can be traced back to the memory +// layout of the camera video frame buffers. +// +// Usually, the memory layout for most CPU (and also some GPU) camera video +// frame buffers results in having the (0, 0) origin point located in the +// Top Left corner. +// +// On the contrary, the memory layout for most GPU camera video frame buffers +// results in having the (0, 0) origin point located in the Bottom Left corner. +// +// Let's consider the following example: +// +// (A) ---------------+ +// ___ | +// | (1) | | | +// | / \ | | | +// | |---|===|-| | +// | |---| | | | +// | / \ | | | +// | | | | | | +// | | (2) |=| | | +// | | | | | | +// | |_______| |_| | +// | |@| |@| | | | +// | ___________|_|_ | +// | +// (B) ---------------+ +// +// On this example, (1) and (2) have the same X coordinate regardless of the +// origin point location. However, having the origin point located at (A) +// (Top Left corner) results in (1) having a smaller Y coordinate if compared to +// (2). Similarly, having the origin point located at (B) (Bottom Left corner) +// results in (1) having a greater Y coordinate if compared to (2). +// +// Providing the correct origin point location for your environment and making +// sure all the input landmarks are in-sync with this location is crucial +// for receiving the correct output face geometry and visual renders. +enum OriginPointLocation { + BOTTOM_LEFT_CORNER = 1; + TOP_LEFT_CORNER = 2; +} + +// The perspective camera is defined through its vertical FOV angle and the +// Z-clipping planes. 
The aspect ratio is a runtime variable for the face
+// geometry module and should be provided alongside the face landmarks in order
+// to estimate the face geometry on a given frame.
+//
+// More info on Perspective Cameras:
+// http://www.songho.ca/opengl/gl_projectionmatrix.html#perspective
+message PerspectiveCamera {
+  // `0 < vertical_fov_degrees < 180`.
+  optional float vertical_fov_degrees = 1;
+  // `0 < near < far`.
+  optional float near = 2;
+  optional float far = 3;
+}
+
+message Environment {
+  optional OriginPointLocation origin_point_location = 1;
+  optional PerspectiveCamera perspective_camera = 2;
+}
diff --git a/mediapipe/modules/face_geometry/protos/face_geometry.proto b/mediapipe/modules/face_geometry/protos/face_geometry.proto
new file mode 100644
index 0000000..b91a7d7
--- /dev/null
+++ b/mediapipe/modules/face_geometry/protos/face_geometry.proto
@@ -0,0 +1,60 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package mediapipe.face_geometry;
+
+import "mediapipe/framework/formats/matrix_data.proto";
+import "mediapipe/modules/face_geometry/protos/mesh_3d.proto";
+
+option java_package = "com.google.mediapipe.modules.facegeometry";
+option java_outer_classname = "FaceGeometryProto";
+
+// Defines the face geometry pipeline estimation result format.
+message FaceGeometry {
+  // Defines a mesh surface for a face. The face mesh vertex IDs are the same
+  // as the face landmark IDs.
+  //
+  // XYZ coordinates exist in the right-handed Metric 3D space configured by an
+  // environment. UV coordinates are taken from the canonical face mesh model.
+  //
+  // XY coordinates are guaranteed to match the screen positions of
+  // the input face landmarks after (1) being multiplied by the face pose
+  // transformation matrix and then (2) being projected with a perspective
+  // camera matrix of the same environment.
+  //
+  // NOTE: the triangular topology of the face mesh is only useful when derived
+  // from the 468 face landmarks, not from the 6 face detection landmarks
+  // (keypoints). The latter don't cover the entire face and this mesh is
+  // defined here only to comply with the API. It should be considered as
+  // a placeholder and/or for debugging purposes.
+  //
+  // Use the face geometry derived from the face detection landmarks
+  // (keypoints) for the face pose transformation matrix, not the mesh.
+  optional Mesh3d mesh = 1;
+
+  // Defines a face pose transformation matrix, which provides mapping from
+  // the static canonical face model to the runtime face. Tries to distinguish
+  // a head pose change from a facial expression change and to only reflect the
+  // former.
+  //
+  // Is a 4x4 matrix and contains only the following components:
+  //   * Uniform scale
+  //   * Rotation
+  //   * Translation
+  //
+  // The last row is guaranteed to be `[0 0 0 1]`.
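+  //
+  // Schematically, with s = uniform scale, R = 3x3 rotation and t = 3x1
+  // translation (illustrative layout implied by the components above):
+  //
+  //   [ s*R    t ]
+  //   [ 0 0 0  1 ]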
+  optional MatrixData pose_transform_matrix = 2;
+}
diff --git a/mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.proto b/mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.proto
new file mode 100644
index 0000000..dac0e25
--- /dev/null
+++ b/mediapipe/modules/face_geometry/protos/geometry_pipeline_metadata.proto
@@ -0,0 +1,63 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package mediapipe.face_geometry;
+
+import "mediapipe/modules/face_geometry/protos/mesh_3d.proto";
+
+option java_package = "com.google.mediapipe.modules.facegeometry";
+option java_outer_classname = "GeometryPipelineMetadataProto";
+
+enum InputSource {
+  DEFAULT = 0;  // FACE_LANDMARK_PIPELINE
+  FACE_LANDMARK_PIPELINE = 1;
+  FACE_DETECTION_PIPELINE = 2;
+}
+
+message WeightedLandmarkRef {
+  // Defines the landmark ID. References an existing face landmark ID.
+  optional uint32 landmark_id = 1;
+  // Defines the landmark weight. The larger the weight the more influence this
+  // landmark has in the basis.
+  //
+  // Is positive.
+  optional float weight = 2;
+}
+
+// Next field ID: 4
+message GeometryPipelineMetadata {
+  // Defines the source of the input landmarks to let the underlying geometry
+  // pipeline adjust in order to produce the best results.
+  //
+  // Face landmark pipeline is expected to produce 3D landmarks with relative Z
+  // coordinate, which is scaled as the X coordinate assuming the weak
+  // perspective projection camera model.
+  //
+  // Face detection pipeline is expected to produce 2D landmarks with Z
+  // coordinate being equal to 0.
+  optional InputSource input_source = 3;
+  // Defines a mesh surface for a canonical face. The canonical face mesh
+  // vertex IDs are the same as the face landmark IDs.
+  //
+  // XYZ coordinates are defined in centimeter units.
+  optional Mesh3d canonical_mesh = 1;
+  // Defines a weighted landmark basis for running the Procrustes solver
+  // algorithm inside the geometry pipeline.
+  //
+  // A good basis sets face landmark weights in a way to distinguish a head
+  // pose change from a facial expression change and to only respond to the
+  // former.
+  repeated WeightedLandmarkRef procrustes_landmark_basis = 2;
+}
diff --git a/mediapipe/modules/face_geometry/protos/mesh_3d.proto b/mediapipe/modules/face_geometry/protos/mesh_3d.proto
new file mode 100644
index 0000000..4db45c1
--- /dev/null
+++ b/mediapipe/modules/face_geometry/protos/mesh_3d.proto
@@ -0,0 +1,41 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe.face_geometry; + +option java_package = "com.google.mediapipe.modules.facegeometry"; +option java_outer_classname = "Mesh3dProto"; + +message Mesh3d { + enum VertexType { + // Is defined by 5 coordinates: Position (XYZ) + Texture coordinate (UV). + VERTEX_PT = 0; + } + + enum PrimitiveType { + // Is defined by 3 indices: triangle vertex IDs. + TRIANGLE = 0; + } + + optional VertexType vertex_type = 1; + optional PrimitiveType primitive_type = 2; + // Vertex buffer size is a multiple of the vertex size (e.g., 5 for + // VERTEX_PT). + repeated float vertex_buffer = 3; + // Index buffer size is a multiple of the primitive size (e.g., 3 for + // TRIANGLE). + repeated uint32 index_buffer = 4; +} diff --git a/mediapipe/modules/face_landmark/BUILD b/mediapipe/modules/face_landmark/BUILD new file mode 100644 index 0000000..f155e46 --- /dev/null +++ b/mediapipe/modules/face_landmark/BUILD @@ -0,0 +1,190 @@ +# Copyright 2019 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "face_landmark_cpu", + graph = "face_landmark_cpu.pbtxt", + register_as = "FaceLandmarkCpu", + deps = [ + ":face_landmarks_model_loader", + ":tensors_to_face_landmarks", + ":tensors_to_face_landmarks_with_attention", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_floats_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/tflite:tflite_custom_op_resolver_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:thresholding_calculator", + "//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmark_gpu", + graph = "face_landmark_gpu.pbtxt", + register_as = "FaceLandmarkGpu", + deps = [ + ":face_landmarks_model_loader", + ":tensors_to_face_landmarks", + ":tensors_to_face_landmarks_with_attention", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_floats_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/tflite:tflite_custom_op_resolver_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:thresholding_calculator", + 
"//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmark_front_cpu", + graph = "face_landmark_front_cpu.pbtxt", + register_as = "FaceLandmarkFrontCpu", + deps = [ + ":face_detection_front_detection_to_roi", + ":face_landmark_cpu", + ":face_landmark_landmarks_to_roi", + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:clip_vector_size_calculator", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/util:association_norm_rect_calculator", + "//mediapipe/calculators/util:collection_has_min_size_calculator", + "//mediapipe/modules/face_detection:face_detection_short_range_cpu", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmark_front_gpu", + graph = "face_landmark_front_gpu.pbtxt", + register_as = "FaceLandmarkFrontGpu", + deps = [ + ":face_detection_front_detection_to_roi", + ":face_landmark_gpu", + ":face_landmark_landmarks_to_roi", + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:clip_vector_size_calculator", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/util:association_norm_rect_calculator", + "//mediapipe/calculators/util:collection_has_min_size_calculator", + "//mediapipe/modules/face_detection:face_detection_short_range_gpu", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmark_front_cpu_image", + graph = "face_landmark_front_cpu_image.pbtxt", + register_as = "FaceLandmarkFrontCpuImage", + deps = [ + ":face_landmark_front_cpu", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/util:from_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmark_front_gpu_image", + graph = "face_landmark_front_gpu_image.pbtxt", + register_as = "FaceLandmarkFrontGpuImage", + deps = [ + ":face_landmark_front_gpu", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/util:from_image_calculator", + ], +) + +exports_files( + srcs = [ + "face_landmark.tflite", + "face_landmark_with_attention.tflite", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_front_detection_to_roi", + graph = "face_detection_front_detection_to_roi.pbtxt", + register_as = "FaceDetectionFrontDetectionToRoi", + deps = [ + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmark_landmarks_to_roi", + graph = "face_landmark_landmarks_to_roi.pbtxt", + register_as = "FaceLandmarkLandmarksToRoi", + deps = [ + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmarks_model_loader", + graph = 
"face_landmarks_model_loader.pbtxt", + register_as = "FaceLandmarksModelLoader", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/tflite:tflite_model_calculator", + "//mediapipe/calculators/util:local_file_contents_calculator", + "//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "tensors_to_face_landmarks", + graph = "tensors_to_face_landmarks.pbtxt", + register_as = "TensorsToFaceLandmarks", + deps = [ + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "tensors_to_face_landmarks_with_attention", + graph = "tensors_to_face_landmarks_with_attention.pbtxt", + register_as = "TensorsToFaceLandmarksWithAttention", + deps = [ + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/util:landmarks_refinement_calculator", + ], +) diff --git a/mediapipe/modules/face_landmark/README.md b/mediapipe/modules/face_landmark/README.md new file mode 100644 index 0000000..eed21a2 --- /dev/null +++ b/mediapipe/modules/face_landmark/README.md @@ -0,0 +1,9 @@ +# face_landmark + +Subgraphs|Details +:--- | :--- +[`FaceLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_cpu.pbtxt)| Detects landmarks on a single face. (CPU input, and inference is executed on CPU.) +[`FaceLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_gpu.pbtxt)| Detects landmarks on a single face. (GPU input, and inference is executed on GPU) +[`FaceLandmarkFrontCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_cpu.pbtxt)| Detects and tracks landmarks on multiple faces. (CPU input, and inference is executed on CPU) +[`FaceLandmarkFrontGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt)| Detects and tracks landmarks on multiple faces. (GPU input, and inference is executed on GPU.) + diff --git a/mediapipe/modules/face_landmark/face_detection_front_detection_to_roi.pbtxt b/mediapipe/modules/face_landmark/face_detection_front_detection_to_roi.pbtxt new file mode 100644 index 0000000..acc9476 --- /dev/null +++ b/mediapipe/modules/face_landmark/face_detection_front_detection_to_roi.pbtxt @@ -0,0 +1,47 @@ +# MediaPipe graph to calculate face region of interest (ROI) from the very +# first face detection in the vector of detections provided by +# "FaceDetectionShortRangeCpu" or "FaceDetectionShortRangeGpu" +# +# NOTE: this graph is subject to change and should not be used directly. + +type: "FaceDetectionFrontDetectionToRoi" + +# Face detection. (Detection) +input_stream: "DETECTION:detection" +# Frame size (width and height). (std::pair) +input_stream: "IMAGE_SIZE:image_size" +# ROI according to the first detection of input detections. (NormalizedRect) +output_stream: "ROI:roi" + +# Converts results of face detection into a rectangle (normalized by image size) +# that encloses the face and is rotated such that the line connecting left eye +# and right eye is aligned with the X-axis of the rectangle. 
+node { + calculator: "DetectionsToRectsCalculator" + input_stream: "DETECTION:detection" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "NORM_RECT:initial_roi" + options: { + [mediapipe.DetectionsToRectsCalculatorOptions.ext] { + rotation_vector_start_keypoint_index: 0 # Left eye. + rotation_vector_end_keypoint_index: 1 # Right eye. + rotation_vector_target_angle_degrees: 0 + } + } +} + +# Expands and shifts the rectangle that contains the face so that it's likely +# to cover the entire face. +node { + calculator: "RectTransformationCalculator" + input_stream: "NORM_RECT:initial_roi" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "roi" + options: { + [mediapipe.RectTransformationCalculatorOptions.ext] { + scale_x: 1.5 + scale_y: 1.5 + square_long: true + } + } +} diff --git a/mediapipe/modules/face_landmark/face_landmark.tflite b/mediapipe/modules/face_landmark/face_landmark.tflite new file mode 100755 index 0000000..573285d Binary files /dev/null and b/mediapipe/modules/face_landmark/face_landmark.tflite differ diff --git a/mediapipe/modules/face_landmark/face_landmark_cpu.pbtxt b/mediapipe/modules/face_landmark/face_landmark_cpu.pbtxt new file mode 100644 index 0000000..4604fc7 --- /dev/null +++ b/mediapipe/modules/face_landmark/face_landmark_cpu.pbtxt @@ -0,0 +1,184 @@ +# MediaPipe graph to detect/predict face landmarks. (CPU input, and inference is +# executed on CPU.) +# +# It is required that "face_landmark.tflite" is available at +# "mediapipe/modules/face_landmark/face_landmark.tflite" +# path during execution if `with_attention` is not set or set to `false`. +# +# It is required that "face_landmark_with_attention.tflite" is available at +# "mediapipe/modules/face_landmark/face_landmark_with_attention.tflite" +# path during execution if `with_attention` is set to `true`. +# +# EXAMPLE: +# node { +# calculator: "FaceLandmarkCpu" +# input_stream: "IMAGE:image" +# input_stream: "ROI:face_roi" +# input_side_packet: "WITH_ATTENTION:with_attention" +# output_stream: "LANDMARKS:face_landmarks" +# } + +type: "FaceLandmarkCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:image" +# ROI (region of interest) within the given image where a face is located. +# (NormalizedRect) +input_stream: "ROI:roi" +# Whether to run face mesh model with attention on lips and eyes. (bool) +# Attention provides more accuracy on lips and eye regions as well as iris +# landmarks. +input_side_packet: "WITH_ATTENTION:with_attention" + +# 468 or 478 facial landmarks within the given ROI. (NormalizedLandmarkList) +# +# Number of landmarks depends on the WITH_ATTENTION flag. If it's `true` - then +# there will be 478 landmarks with refined lips, eyes and irises (10 extra +# landmarks are for irises), otherwise 468 non-refined landmarks are returned. +# +# NOTE: if a face is not present within the given ROI, for this particular +# timestamp there will not be an output packet in the LANDMARKS stream. However, +# the MediaPipe framework will internally inform the downstream calculators of +# the absence of this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:face_landmarks" + +# Transforms the input image into a 192x192 tensor. 
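+# Cropping and rotating by the ROI happen in the same operation, and pixel
+# values are rescaled into the [0.0, 1.0] float range expected by the model
+# (see `output_tensor_float_range` below).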
+node: { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE:image" + input_stream: "NORM_RECT:roi" + output_stream: "TENSORS:input_tensors" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 192 + output_tensor_height: 192 + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + } + } +} + +# Loads the face landmarks TF Lite model. +node { + calculator: "FaceLandmarksModelLoader" + input_side_packet: "WITH_ATTENTION:with_attention" + output_side_packet: "MODEL:model" +} + +# Generates a single side packet containing a TensorFlow Lite op resolver that +# supports custom ops needed by the model used in this graph. +node { + calculator: "TfLiteCustomOpResolverCalculator" + output_side_packet: "op_resolver" +} + +# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:input_tensors" + input_side_packet: "MODEL:model" + input_side_packet: "CUSTOM_OP_RESOLVER:op_resolver" + output_stream: "TENSORS:output_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + delegate { xnnpack {} } + } + } +} + +# Splits a vector of tensors into landmark tensors and face flag tensor. +node { + calculator: "SwitchContainer" + input_side_packet: "ENABLE:with_attention" + input_stream: "output_tensors" + output_stream: "landmark_tensors" + output_stream: "face_flag_tensor" + options: { + [mediapipe.SwitchContainerOptions.ext] { + contained_node: { + calculator: "SplitTensorVectorCalculator" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 1 } + ranges: { begin: 1 end: 2 } + } + } + } + contained_node: { + calculator: "SplitTensorVectorCalculator" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 6 } + ranges: { begin: 6 end: 7 } + } + } + } + } + } +} + +# Converts the face-flag tensor into a float that represents the confidence +# score of face presence. +node { + calculator: "TensorsToFloatsCalculator" + input_stream: "TENSORS:face_flag_tensor" + output_stream: "FLOAT:face_presence_score" + options { + [mediapipe.TensorsToFloatsCalculatorOptions.ext] { + activation: SIGMOID + } + } +} + +# Applies a threshold to the confidence score to determine whether a face is +# present. +node { + calculator: "ThresholdingCalculator" + input_stream: "FLOAT:face_presence_score" + output_stream: "FLAG:face_presence" + options: { + [mediapipe.ThresholdingCalculatorOptions.ext] { + threshold: 0.5 + } + } +} + +# Drop landmarks tensors if face is not present. +node { + calculator: "GateCalculator" + input_stream: "landmark_tensors" + input_stream: "ALLOW:face_presence" + output_stream: "ensured_landmark_tensors" +} + +# Decodes the landmark tensors into a vector of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. +node { + calculator: "SwitchContainer" + input_side_packet: "ENABLE:with_attention" + input_stream: "TENSORS:ensured_landmark_tensors" + output_stream: "LANDMARKS:landmarks" + options: { + [mediapipe.SwitchContainerOptions.ext] { + contained_node: { + calculator: "TensorsToFaceLandmarks" + } + contained_node: { + calculator: "TensorsToFaceLandmarksWithAttention" + } + } + } +} + +# Projects the landmarks from the cropped face image to the corresponding +# locations on the full image before cropping (input to the graph). 
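+# For example (illustrative), a landmark at (0.5, 0.5) in ROI-local normalized
+# coordinates, i.e. the ROI center, maps back to the ROI's center point in
+# full-image coordinates once the ROI's scale and rotation are applied.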
+node {
+  calculator: "LandmarkProjectionCalculator"
+  input_stream: "NORM_LANDMARKS:landmarks"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "NORM_LANDMARKS:face_landmarks"
+}
diff --git a/mediapipe/modules/face_landmark/face_landmark_front_cpu.pbtxt b/mediapipe/modules/face_landmark/face_landmark_front_cpu.pbtxt
new file mode 100644
index 0000000..70a57b0
--- /dev/null
+++ b/mediapipe/modules/face_landmark/face_landmark_front_cpu.pbtxt
@@ -0,0 +1,247 @@
+# MediaPipe graph to detect/predict face landmarks. (CPU input, and inference is
+# executed on CPU.) This graph tries to skip face detection as much as possible
+# by using previously detected/predicted landmarks for new images.
+#
+# It is required that "face_detection_short_range.tflite" is available at
+# "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+# path during execution.
+#
+# It is required that "face_landmark.tflite" is available at
+# "mediapipe/modules/face_landmark/face_landmark.tflite"
+# path during execution if `with_attention` is not set or set to `false`.
+#
+# It is required that "face_landmark_with_attention.tflite" is available at
+# "mediapipe/modules/face_landmark/face_landmark_with_attention.tflite"
+# path during execution if `with_attention` is set to `true`.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "FaceLandmarkFrontCpu"
+#     input_stream: "IMAGE:image"
+#     input_side_packet: "NUM_FACES:num_faces"
+#     input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+#     input_side_packet: "WITH_ATTENTION:with_attention"
+#     output_stream: "LANDMARKS:multi_face_landmarks"
+#   }
+
+type: "FaceLandmarkFrontCpu"
+
+# CPU image. (ImageFrame)
+input_stream: "IMAGE:image"
+
+# Max number of faces to detect/track. (int)
+input_side_packet: "NUM_FACES:num_faces"
+
+# Whether landmarks on the previous image should be used to help localize
+# landmarks on the current image. (bool)
+input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+
+# Whether to run face mesh model with attention on lips and eyes. (bool)
+# Attention provides more accuracy on lips and eye regions as well as iris
+# landmarks.
+input_side_packet: "WITH_ATTENTION:with_attention"
+
+# Collection of detected/predicted faces, each represented as a list of 468 face
+# landmarks. (std::vector<NormalizedLandmarkList>)
+# NOTE: there will not be an output packet in the LANDMARKS stream for this
+# particular timestamp if no faces are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "LANDMARKS:multi_face_landmarks"
+
+# Extra outputs (for debugging, for instance).
+# Detected faces. (std::vector<Detection>)
+output_stream: "DETECTIONS:face_detections"
+# Regions of interest calculated based on landmarks.
+# (std::vector<NormalizedRect>)
+output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks"
+# Regions of interest calculated based on face detections.
+# (std::vector<NormalizedRect>)
+output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections"
+
+# When the optional input side packet "use_prev_landmarks" is either absent or
+# set to true, uses the landmarks on the previous image to help localize
+# landmarks on the current image.
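+# In other words, the comparatively expensive face detector below only runs
+# when tracking from the previous frame fails to supply `num_faces` regions
+# of interest.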
+node { + calculator: "GateCalculator" + input_side_packet: "ALLOW:use_prev_landmarks" + input_stream: "prev_face_rects_from_landmarks" + output_stream: "gated_prev_face_rects_from_landmarks" + options: { + [mediapipe.GateCalculatorOptions.ext] { + allow: true + } + } +} + +# Determines if an input vector of NormalizedRect has a size greater than or +# equal to the provided num_faces. +node { + calculator: "NormalizedRectVectorHasMinSizeCalculator" + input_stream: "ITERABLE:gated_prev_face_rects_from_landmarks" + input_side_packet: "num_faces" + output_stream: "prev_has_enough_faces" +} + +# Drops the incoming image if enough faces have already been identified from the +# previous image. Otherwise, passes the incoming image through to trigger a new +# round of face detection. +node { + calculator: "GateCalculator" + input_stream: "image" + input_stream: "DISALLOW:prev_has_enough_faces" + output_stream: "gated_image" + options: { + [mediapipe.GateCalculatorOptions.ext] { + empty_packets_as_allow: true + } + } +} + +# Detects faces. +node { + calculator: "FaceDetectionShortRangeCpu" + input_stream: "IMAGE:gated_image" + output_stream: "DETECTIONS:all_face_detections" +} + +# Makes sure there are no more detections than the provided num_faces. +node { + calculator: "ClipDetectionVectorSizeCalculator" + input_stream: "all_face_detections" + output_stream: "face_detections" + input_side_packet: "num_faces" +} + +# Calculate size of the image. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:gated_image" + output_stream: "SIZE:gated_image_size" +} + +# Outputs each element of face_detections at a fake timestamp for the rest of +# the graph to process. Clones the image size packet for each face_detection at +# the fake timestamp. At the end of the loop, outputs the BATCH_END timestamp +# for downstream calculators to inform them that all elements in the vector have +# been processed. +node { + calculator: "BeginLoopDetectionCalculator" + input_stream: "ITERABLE:face_detections" + input_stream: "CLONE:gated_image_size" + output_stream: "ITEM:face_detection" + output_stream: "CLONE:detections_loop_image_size" + output_stream: "BATCH_END:detections_loop_end_timestamp" +} + +# Calculates region of interest based on face detections, so that can be used +# to detect landmarks. +node { + calculator: "FaceDetectionFrontDetectionToRoi" + input_stream: "DETECTION:face_detection" + input_stream: "IMAGE_SIZE:detections_loop_image_size" + output_stream: "ROI:face_rect_from_detection" +} + +# Collects a NormalizedRect for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:face_rect_from_detection" + input_stream: "BATCH_END:detections_loop_end_timestamp" + output_stream: "ITERABLE:face_rects_from_detections" +} + +# Performs association between NormalizedRect vector elements from previous +# image and rects based on face detections from the current image. This +# calculator ensures that the output face_rects vector doesn't contain +# overlapping regions based on the specified min_similarity_threshold. +node { + calculator: "AssociationNormRectCalculator" + input_stream: "face_rects_from_detections" + input_stream: "gated_prev_face_rects_from_landmarks" + output_stream: "face_rects" + options: { + [mediapipe.AssociationCalculatorOptions.ext] { + min_similarity_threshold: 0.5 + } + } +} + +# Calculate size of the image. 
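+# The resulting (width, height) pair is cloned for each face below so that
+# ROIs can be derived from landmarks in normalized coordinates.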
+node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:image" + output_stream: "SIZE:image_size" +} + +# Outputs each element of face_rects at a fake timestamp for the rest of the +# graph to process. Clones image and image size packets for each +# single_face_rect at the fake timestamp. At the end of the loop, outputs the +# BATCH_END timestamp for downstream calculators to inform them that all +# elements in the vector have been processed. +node { + calculator: "BeginLoopNormalizedRectCalculator" + input_stream: "ITERABLE:face_rects" + input_stream: "CLONE:0:image" + input_stream: "CLONE:1:image_size" + output_stream: "ITEM:face_rect" + output_stream: "CLONE:0:landmarks_loop_image" + output_stream: "CLONE:1:landmarks_loop_image_size" + output_stream: "BATCH_END:landmarks_loop_end_timestamp" +} + +# Detects face landmarks within specified region of interest of the image. +node { + calculator: "FaceLandmarkCpu" + input_stream: "IMAGE:landmarks_loop_image" + input_stream: "ROI:face_rect" + input_side_packet: "WITH_ATTENTION:with_attention" + output_stream: "LANDMARKS:face_landmarks" +} + +# Calculates region of interest based on face landmarks, so that can be reused +# for subsequent image. +node { + calculator: "FaceLandmarkLandmarksToRoi" + input_stream: "LANDMARKS:face_landmarks" + input_stream: "IMAGE_SIZE:landmarks_loop_image_size" + output_stream: "ROI:face_rect_from_landmarks" +} + +# Collects a set of landmarks for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITEM:face_landmarks" + input_stream: "BATCH_END:landmarks_loop_end_timestamp" + output_stream: "ITERABLE:multi_face_landmarks" +} + +# Collects a NormalizedRect for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:face_rect_from_landmarks" + input_stream: "BATCH_END:landmarks_loop_end_timestamp" + output_stream: "ITERABLE:face_rects_from_landmarks" +} + +# Caches face rects calculated from landmarks, and upon the arrival of the next +# input image, sends out the cached rects with timestamps replaced by that of +# the input image, essentially generating a packet that carries the previous +# face rects. Note that upon the arrival of the very first input image, a +# timestamp bound update occurs to jump start the feedback loop. +node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:image" + input_stream: "LOOP:face_rects_from_landmarks" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:prev_face_rects_from_landmarks" +} diff --git a/mediapipe/modules/face_landmark/face_landmark_front_cpu_image.pbtxt b/mediapipe/modules/face_landmark/face_landmark_front_cpu_image.pbtxt new file mode 100644 index 0000000..7d0c46a --- /dev/null +++ b/mediapipe/modules/face_landmark/face_landmark_front_cpu_image.pbtxt @@ -0,0 +1,87 @@ +# MediaPipe graph to detect/predict face landmarks on CPU. + +type: "FaceLandmarkFrontCpuImage" + +# Input image. (Image) +input_stream: "IMAGE:image" + +# Max number of faces to detect/track. (int) +input_side_packet: "NUM_FACES:num_faces" + +# Whether landmarks on the previous image should be used to help localize +# landmarks on the current image. 
(bool)
+input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+
+# Whether to run face mesh model with attention on lips and eyes. (bool)
+# Attention provides more accuracy on lips and eye regions as well as iris
+# landmarks.
+input_side_packet: "WITH_ATTENTION:with_attention"
+
+# The throttled input image. (Image)
+output_stream: "IMAGE:throttled_image"
+# Collection of detected/predicted faces, each represented as a list of 468 face
+# landmarks. (std::vector<NormalizedLandmarkList>)
+# NOTE: there will not be an output packet in the LANDMARKS stream for this
+# particular timestamp if no faces are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "LANDMARKS:multi_face_landmarks"
+
+# Extra outputs (for debugging, for instance).
+# Detected faces. (std::vector<Detection>)
+output_stream: "DETECTIONS:face_detections"
+# Regions of interest calculated based on landmarks.
+# (std::vector<NormalizedRect>)
+output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks"
+# Regions of interest calculated based on face detections.
+# (std::vector<NormalizedRect>)
+output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections"
+
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "image"
+  input_stream: "FINISHED:multi_face_landmarks"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_image"
+  options: {
+    [mediapipe.FlowLimiterCalculatorOptions.ext] {
+      max_in_flight: 1
+      max_in_queue: 1
+    }
+  }
+}
+
+# Converts Image to ImageFrame for FaceLandmarkFrontCpu to consume.
+node {
+  calculator: "FromImageCalculator"
+  input_stream: "IMAGE:throttled_image"
+  output_stream: "IMAGE_CPU:raw_image_frame"
+  output_stream: "SOURCE_ON_GPU:is_gpu_image"
+}
+
+# TODO: Remove the extra flipping once adopting MlImage.
+# If the source images are on gpu, flip the data vertically before sending them
+# into FaceLandmarkFrontCpu. This may be needed because OpenGL represents images
+# assuming the image origin is at the bottom-left corner, whereas MediaPipe in
+# general assumes the image origin is at the top-left corner.
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE:raw_image_frame"
+  input_stream: "FLIP_VERTICALLY:is_gpu_image"
+  output_stream: "IMAGE:image_frame"
+}
+
+node {
+  calculator: "FaceLandmarkFrontCpu"
+  input_stream: "IMAGE:image_frame"
+  input_side_packet: "NUM_FACES:num_faces"
+  input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+  input_side_packet: "WITH_ATTENTION:with_attention"
+  output_stream: "LANDMARKS:multi_face_landmarks"
+  output_stream: "DETECTIONS:face_detections"
+  output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks"
+  output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections"
+}
diff --git a/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt b/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt
new file mode 100644
index 0000000..fd89565
--- /dev/null
+++ b/mediapipe/modules/face_landmark/face_landmark_front_gpu.pbtxt
@@ -0,0 +1,247 @@
+# MediaPipe graph to detect/predict face landmarks. (GPU input, and inference is
+# executed on GPU.) This graph tries to skip face detection as much as possible
+# by using previously detected/predicted landmarks for new images.
+#
+# It is required that "face_detection_short_range.tflite" is available at
+# "mediapipe/modules/face_detection/face_detection_short_range.tflite"
+# path during execution.
+# +# It is required that "face_landmark.tflite" is available at +# "mediapipe/modules/face_landmark/face_landmark.tflite" +# path during execution if `with_attention` is not set or set to `false`. +# +# It is required that "face_landmark_with_attention.tflite" is available at +# "mediapipe/modules/face_landmark/face_landmark_with_attention.tflite" +# path during execution if `with_attention` is set to `true`. +# +# EXAMPLE: +# node { +# calculator: "FaceLandmarkFrontGpu" +# input_stream: "IMAGE:image" +# input_side_packet: "NUM_FACES:num_faces" +# input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" +# input_side_packet: "WITH_ATTENTION:with_attention" +# output_stream: "LANDMARKS:multi_face_landmarks" +# } + +type: "FaceLandmarkFrontGpu" + +# GPU image. (GpuBuffer) +input_stream: "IMAGE:image" + +# Max number of faces to detect/track. (int) +input_side_packet: "NUM_FACES:num_faces" + +# Whether landmarks on the previous image should be used to help localize +# landmarks on the current image. (bool) +input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" + +# Whether to run face mesh model with attention on lips and eyes. (bool) +# Attention provides more accuracy on lips and eye regions as well as iris +# landmarks. +input_side_packet: "WITH_ATTENTION:with_attention" + +# Collection of detected/predicted faces, each represented as a list of 468 face +# landmarks. (std::vector) +# NOTE: there will not be an output packet in the LANDMARKS stream for this +# particular timestamp if none of faces detected. However, the MediaPipe +# framework will internally inform the downstream calculators of the absence of +# this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:multi_face_landmarks" + +# Extra outputs (for debugging, for instance). +# Detected faces. (std::vector) +output_stream: "DETECTIONS:face_detections" +# Regions of interest calculated based on landmarks. +# (std::vector) +output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" +# Regions of interest calculated based on face detections. +# (std::vector) +output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" + +# When the optional input side packet "use_prev_landmarks" is either absent or +# set to true, uses the landmarks on the previous image to help localize +# landmarks on the current image. +node { + calculator: "GateCalculator" + input_side_packet: "ALLOW:use_prev_landmarks" + input_stream: "prev_face_rects_from_landmarks" + output_stream: "gated_prev_face_rects_from_landmarks" + options: { + [mediapipe.GateCalculatorOptions.ext] { + allow: true + } + } +} + +# Determines if an input vector of NormalizedRect has a size greater than or +# equal to the provided num_faces. +node { + calculator: "NormalizedRectVectorHasMinSizeCalculator" + input_stream: "ITERABLE:gated_prev_face_rects_from_landmarks" + input_side_packet: "num_faces" + output_stream: "prev_has_enough_faces" +} + +# Drops the incoming image if enough faces have already been identified from the +# previous image. Otherwise, passes the incoming image through to trigger a new +# round of face detection. +node { + calculator: "GateCalculator" + input_stream: "image" + input_stream: "DISALLOW:prev_has_enough_faces" + output_stream: "gated_image" + options: { + [mediapipe.GateCalculatorOptions.ext] { + empty_packets_as_allow: true + } + } +} + +# Detects faces. 
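+# FaceDetectionShortRangeGpu wraps the short-range face detection model, which
+# (per its model card) is tuned for faces within roughly two meters of the
+# camera. Because of the gate above, detection only runs on images where the
+# previous frame did not already yield enough face rects, so on most frames of
+# a tracking session this node is idle.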
+node { + calculator: "FaceDetectionShortRangeGpu" + input_stream: "IMAGE:gated_image" + output_stream: "DETECTIONS:all_face_detections" +} + +# Makes sure there are no more detections than the provided num_faces. +node { + calculator: "ClipDetectionVectorSizeCalculator" + input_stream: "all_face_detections" + output_stream: "face_detections" + input_side_packet: "num_faces" +} + +# Calculate size of the image. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:gated_image" + output_stream: "SIZE:gated_image_size" +} + +# Outputs each element of face_detections at a fake timestamp for the rest of +# the graph to process. Clones the image size packet for each face_detection at +# the fake timestamp. At the end of the loop, outputs the BATCH_END timestamp +# for downstream calculators to inform them that all elements in the vector have +# been processed. +node { + calculator: "BeginLoopDetectionCalculator" + input_stream: "ITERABLE:face_detections" + input_stream: "CLONE:gated_image_size" + output_stream: "ITEM:face_detection" + output_stream: "CLONE:detections_loop_image_size" + output_stream: "BATCH_END:detections_loop_end_timestamp" +} + +# Calculates region of interest based on face detections, so that can be used +# to detect landmarks. +node { + calculator: "FaceDetectionFrontDetectionToRoi" + input_stream: "DETECTION:face_detection" + input_stream: "IMAGE_SIZE:detections_loop_image_size" + output_stream: "ROI:face_rect_from_detection" +} + +# Collects a NormalizedRect for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:face_rect_from_detection" + input_stream: "BATCH_END:detections_loop_end_timestamp" + output_stream: "ITERABLE:face_rects_from_detections" +} + +# Performs association between NormalizedRect vector elements from previous +# image and rects based on face detections from the current image. This +# calculator ensures that the output face_rects vector doesn't contain +# overlapping regions based on the specified min_similarity_threshold. +node { + calculator: "AssociationNormRectCalculator" + input_stream: "face_rects_from_detections" + input_stream: "gated_prev_face_rects_from_landmarks" + output_stream: "face_rects" + options: { + [mediapipe.AssociationCalculatorOptions.ext] { + min_similarity_threshold: 0.5 + } + } +} + +# Calculate size of the image. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:image" + output_stream: "SIZE:image_size" +} + +# Outputs each element of face_rects at a fake timestamp for the rest of the +# graph to process. Clones image and image size packets for each +# single_face_rect at the fake timestamp. At the end of the loop, outputs the +# BATCH_END timestamp for downstream calculators to inform them that all +# elements in the vector have been processed. +node { + calculator: "BeginLoopNormalizedRectCalculator" + input_stream: "ITERABLE:face_rects" + input_stream: "CLONE:0:image" + input_stream: "CLONE:1:image_size" + output_stream: "ITEM:face_rect" + output_stream: "CLONE:0:landmarks_loop_image" + output_stream: "CLONE:1:landmarks_loop_image_size" + output_stream: "BATCH_END:landmarks_loop_end_timestamp" +} + +# Detects face landmarks within specified region of interest of the image. 
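+# Note that this node sits inside the BeginLoop/EndLoop scope opened above, so
+# it is invoked once per face_rect, each time at a loop-internal (fake)
+# timestamp.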
+node { + calculator: "FaceLandmarkGpu" + input_stream: "IMAGE:landmarks_loop_image" + input_stream: "ROI:face_rect" + input_side_packet: "WITH_ATTENTION:with_attention" + output_stream: "LANDMARKS:face_landmarks" +} + +# Calculates region of interest based on face landmarks, so that can be reused +# for subsequent image. +node { + calculator: "FaceLandmarkLandmarksToRoi" + input_stream: "LANDMARKS:face_landmarks" + input_stream: "IMAGE_SIZE:landmarks_loop_image_size" + output_stream: "ROI:face_rect_from_landmarks" +} + +# Collects a set of landmarks for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITEM:face_landmarks" + input_stream: "BATCH_END:landmarks_loop_end_timestamp" + output_stream: "ITERABLE:multi_face_landmarks" +} + +# Collects a NormalizedRect for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:face_rect_from_landmarks" + input_stream: "BATCH_END:landmarks_loop_end_timestamp" + output_stream: "ITERABLE:face_rects_from_landmarks" +} + +# Caches face rects calculated from landmarks, and upon the arrival of the next +# input image, sends out the cached rects with timestamps replaced by that of +# the input image, essentially generating a packet that carries the previous +# face rects. Note that upon the arrival of the very first input image, a +# timestamp bound update occurs to jump start the feedback loop. +node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:image" + input_stream: "LOOP:face_rects_from_landmarks" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:prev_face_rects_from_landmarks" +} diff --git a/mediapipe/modules/face_landmark/face_landmark_front_gpu_image.pbtxt b/mediapipe/modules/face_landmark/face_landmark_front_gpu_image.pbtxt new file mode 100644 index 0000000..31da4b8 --- /dev/null +++ b/mediapipe/modules/face_landmark/face_landmark_front_gpu_image.pbtxt @@ -0,0 +1,87 @@ +# MediaPipe graph to detect/predict face landmarks on GPU. + +type: "FaceLandmarkFrontGpuImage" + +# Input image. (Image) +input_stream: "IMAGE:image" + +# Max number of faces to detect/track. (int) +input_side_packet: "NUM_FACES:num_faces" + +# Whether landmarks on the previous image should be used to help localize +# landmarks on the current image. (bool) +input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" + +# Whether to run face mesh model with attention on lips and eyes. (bool) +# Attention provides more accuracy on lips and eye regions as well as iris +# landmarks. +input_side_packet: "WITH_ATTENTION:with_attention" + +# The throttled input image. (Image) +output_stream: "IMAGE:throttled_image" +# Collection of detected/predicted faces, each represented as a list of 468 face +# landmarks. (std::vector) +# NOTE: there will not be an output packet in the LANDMARKS stream for this +# particular timestamp if none of faces detected. However, the MediaPipe +# framework will internally inform the downstream calculators of the absence of +# this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:multi_face_landmarks" + +# Extra outputs (for debugging, for instance). +# Detected faces. 
(std::vector) +output_stream: "DETECTIONS:face_detections" +# Regions of interest calculated based on landmarks. +# (std::vector) +output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" +# Regions of interest calculated based on face detections. +# (std::vector) +output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" + +node { + calculator: "FlowLimiterCalculator" + input_stream: "image" + input_stream: "FINISHED:multi_face_landmarks" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_image" + options: { + [mediapipe.FlowLimiterCalculatorOptions.ext] { + max_in_flight: 1 + max_in_queue: 1 + } + } +} + +# Converts Image to GpuBuffer for FaceLandmarkFrontGpu to consume. +node { + calculator: "FromImageCalculator" + input_stream: "IMAGE:throttled_image" + output_stream: "IMAGE_GPU:raw_gpu_buffer" + output_stream: "SOURCE_ON_GPU:is_gpu_image" +} + +# TODO: Remove the extra flipping once adopting MlImage. +# If the source images are on gpu, flip the data vertically before sending them +# into FaceLandmarkFrontGpu. This maybe needed because OpenGL represents images +# assuming the image origin is at the bottom-left corner, whereas MediaPipe in +# general assumes the image origin is at the top-left corner. +node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE_GPU:raw_gpu_buffer" + input_stream: "FLIP_VERTICALLY:is_gpu_image" + output_stream: "IMAGE_GPU:gpu_buffer" +} + +node { + calculator: "FaceLandmarkFrontGpu" + input_stream: "IMAGE:gpu_buffer" + input_side_packet: "NUM_FACES:num_faces" + input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" + input_side_packet: "WITH_ATTENTION:with_attention" + output_stream: "LANDMARKS:multi_face_landmarks" + output_stream: "DETECTIONS:face_detections" + output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" + output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" +} diff --git a/mediapipe/modules/face_landmark/face_landmark_front_side_model_cpu.pbtxt b/mediapipe/modules/face_landmark/face_landmark_front_side_model_cpu.pbtxt new file mode 100644 index 0000000..d3d26c0 --- /dev/null +++ b/mediapipe/modules/face_landmark/face_landmark_front_side_model_cpu.pbtxt @@ -0,0 +1,224 @@ +# MediaPipe graph to detect/predict face landmarks. (CPU input, and inference is +# executed on CPU.) This graph tries to skip face detection as much as possible +# by using previously detected/predicted landmarks for new images. +# +# EXAMPLE: +# node { +# calculator: "FaceLandmarkFrontSideModelCpu" +# input_stream: "IMAGE:image" +# input_side_packet: "NUM_FACES:num_faces" +# input_side_packet: "MODEL:0:face_detection_model" +# input_side_packet: "MODEL:1:face_landmark_model" +# output_stream: "LANDMARKS:multi_face_landmarks" +# } + +type: "FaceLandmarkFrontSideModelCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:image" + +# Max number of faces to detect/track. (int) +input_side_packet: "NUM_FACES:num_faces" +# TfLite model to detect faces. +# (std::unique_ptr>) +# NOTE: mediapipe/modules/face_detection/face_detection_short_range.tflite +# model only, can be passed here, otherwise - results are undefined. +input_side_packet: "MODEL:0:face_detection_model" +# TfLite model to detect face landmarks. +# (std::unique_ptr>) +# NOTE: mediapipe/modules/face_landmark/face_landmark.tflite model +# only, can be passed here, otherwise - results are undefined. 
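+#
+# For reference, a model side packet of this type can be produced at the outer
+# graph level roughly as follows (a sketch; the side-packet names are
+# illustrative, while the calculators and tags are the same ones used by the
+# model-loader subgraph elsewhere in this module):
+#
+#   node {
+#     calculator: "LocalFileContentsCalculator"
+#     input_side_packet: "FILE_PATH:face_landmark_model_path"
+#     output_side_packet: "CONTENTS:face_landmark_model_blob"
+#   }
+#   node {
+#     calculator: "TfLiteModelCalculator"
+#     input_side_packet: "MODEL_BLOB:face_landmark_model_blob"
+#     output_side_packet: "MODEL:face_landmark_model"
+#   }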
+input_side_packet: "MODEL:1:face_landmark_model" + +# Collection of detected/predicted faces, each represented as a list of 468 face +# landmarks. (std::vector) +# NOTE: there will not be an output packet in the LANDMARKS stream for this +# particular timestamp if none of faces detected. However, the MediaPipe +# framework will internally inform the downstream calculators of the absence of +# this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:multi_face_landmarks" + +# Extra outputs (for debugging, for instance). +# Detected faces. (std::vector) +output_stream: "DETECTIONS:face_detections" +# Regions of interest calculated based on landmarks. +# (std::vector) +output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" +# Regions of interest calculated based on face detections. +# (std::vector) +output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" + +# Determines if an input vector of NormalizedRect has a size greater than or +# equal to the provided num_faces. +node { + calculator: "NormalizedRectVectorHasMinSizeCalculator" + input_stream: "ITERABLE:prev_face_rects_from_landmarks" + input_side_packet: "num_faces" + output_stream: "prev_has_enough_faces" +} + +# Drops the incoming image if FaceLandmarkCpu was able to identify face presence +# in the previous image. Otherwise, passes the incoming image through to trigger +# a new round of face detection in FaceDetectionShortRangeCpu. +node { + calculator: "GateCalculator" + input_stream: "image" + input_stream: "DISALLOW:prev_has_enough_faces" + output_stream: "gated_image" + options: { + [mediapipe.GateCalculatorOptions.ext] { + empty_packets_as_allow: true + } + } +} + +# Detects faces. +node { + calculator: "FaceDetectionShortRangeSideModelCpu" + input_stream: "IMAGE:gated_image" + input_side_packet: "MODEL:face_detection_model" + output_stream: "DETECTIONS:all_face_detections" +} + +# Makes sure there are no more detections than the provided num_faces. +node { + calculator: "ClipDetectionVectorSizeCalculator" + input_stream: "all_face_detections" + output_stream: "face_detections" + input_side_packet: "num_faces" +} + +# Calculate size of the image. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:gated_image" + output_stream: "SIZE:gated_image_size" +} + +# Outputs each element of face_detections at a fake timestamp for the rest of +# the graph to process. Clones the image size packet for each face_detection at +# the fake timestamp. At the end of the loop, outputs the BATCH_END timestamp +# for downstream calculators to inform them that all elements in the vector have +# been processed. +node { + calculator: "BeginLoopDetectionCalculator" + input_stream: "ITERABLE:face_detections" + input_stream: "CLONE:gated_image_size" + output_stream: "ITEM:face_detection" + output_stream: "CLONE:detections_loop_image_size" + output_stream: "BATCH_END:detections_loop_end_timestamp" +} + +# Calculates region of interest based on face detections, so that can be used +# to detect landmarks. +node { + calculator: "FaceDetectionFrontDetectionToRoi" + input_stream: "DETECTION:face_detection" + input_stream: "IMAGE_SIZE:detections_loop_image_size" + output_stream: "ROI:face_rect_from_detection" +} + +# Collects a NormalizedRect for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END +# timestamp. 
+node { + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:face_rect_from_detection" + input_stream: "BATCH_END:detections_loop_end_timestamp" + output_stream: "ITERABLE:face_rects_from_detections" +} + +# Performs association between NormalizedRect vector elements from previous +# image and rects based on face detections from the current image. This +# calculator ensures that the output face_rects vector doesn't contain +# overlapping regions based on the specified min_similarity_threshold. +node { + calculator: "AssociationNormRectCalculator" + input_stream: "face_rects_from_detections" + input_stream: "prev_face_rects_from_landmarks" + output_stream: "face_rects" + options: { + [mediapipe.AssociationCalculatorOptions.ext] { + min_similarity_threshold: 0.5 + } + } +} + +# Calculate size of the image. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:image" + output_stream: "SIZE:image_size" +} + +# Outputs each element of face_rects at a fake timestamp for the rest of the +# graph to process. Clones image and image size packets for each +# single_face_rect at the fake timestamp. At the end of the loop, outputs the +# BATCH_END timestamp for downstream calculators to inform them that all +# elements in the vector have been processed. +node { + calculator: "BeginLoopNormalizedRectCalculator" + input_stream: "ITERABLE:face_rects" + input_stream: "CLONE:0:image" + input_stream: "CLONE:1:image_size" + output_stream: "ITEM:face_rect" + output_stream: "CLONE:0:landmarks_loop_image" + output_stream: "CLONE:1:landmarks_loop_image_size" + output_stream: "BATCH_END:landmarks_loop_end_timestamp" +} + +# Detects face landmarks within specified region of interest of the image. +node { + calculator: "FaceLandmarkSideModelCpu" + input_stream: "IMAGE:landmarks_loop_image" + input_stream: "ROI:face_rect" + input_side_packet: "MODEL:face_landmark_model" + output_stream: "LANDMARKS:face_landmarks" +} + +# Calculates region of interest based on face landmarks, so that can be reused +# for subsequent image. +node { + calculator: "FaceLandmarkLandmarksToRoi" + input_stream: "LANDMARKS:face_landmarks" + input_stream: "IMAGE_SIZE:landmarks_loop_image_size" + output_stream: "ROI:face_rect_from_landmarks" +} + +# Collects a set of landmarks for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITEM:face_landmarks" + input_stream: "BATCH_END:landmarks_loop_end_timestamp" + output_stream: "ITERABLE:multi_face_landmarks" +} + +# Collects a NormalizedRect for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:face_rect_from_landmarks" + input_stream: "BATCH_END:landmarks_loop_end_timestamp" + output_stream: "ITERABLE:face_rects_from_landmarks" +} + +# Caches face rects calculated from landmarks, and upon the arrival of the next +# input image, sends out the cached rects with timestamps replaced by that of +# the input image, essentially generating a packet that carries the previous +# face rects. Note that upon the arrival of the very first input image, a +# timestamp bound update occurs to jump start the feedback loop. 
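+# The back_edge annotation below is what allows this cycle in an otherwise
+# acyclic graph: the framework does not wait on the LOOP input when computing
+# input timestamp bounds, which is why the very first image can pass through
+# before any face rects exist.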
+node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:image" + input_stream: "LOOP:face_rects_from_landmarks" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:prev_face_rects_from_landmarks" +} diff --git a/mediapipe/modules/face_landmark/face_landmark_front_side_model_gpu.pbtxt b/mediapipe/modules/face_landmark/face_landmark_front_side_model_gpu.pbtxt new file mode 100644 index 0000000..9832c2f --- /dev/null +++ b/mediapipe/modules/face_landmark/face_landmark_front_side_model_gpu.pbtxt @@ -0,0 +1,224 @@ +# MediaPipe graph to detect/predict face landmarks. (GPU input, and inference is +# executed on GPU.) This graph tries to skip face detection as much as possible +# by using previously detected/predicted landmarks for new images. +# +# EXAMPLE: +# node { +# calculator: "FaceLandmarkFrontSideModelGpu" +# input_stream: "IMAGE:image" +# input_side_packet: "NUM_FACES:num_faces" +# input_side_packet: "MODEL:0:face_detection_model" +# input_side_packet: "MODEL:1:face_landmark_model" +# output_stream: "LANDMARKS:multi_face_landmarks" +# } + +type: "FaceLandmarkFrontSideModelGpu" + +# GPU image. (GpuBuffer) +input_stream: "IMAGE:image" + +# Max number of faces to detect/track. (int) +input_side_packet: "NUM_FACES:num_faces" +# TfLite model to detect faces. +# (std::unique_ptr>) +# NOTE: mediapipe/modules/face_detection/face_detection_short_range.tflite +# model only, can be passed here, otherwise - results are undefined. +input_side_packet: "MODEL:0:face_detection_model" +# TfLite model to detect face landmarks. +# (std::unique_ptr>) +# NOTE: mediapipe/modules/face_landmark/face_landmark.tflite model +# only, can be passed here, otherwise - results are undefined. +input_side_packet: "MODEL:1:face_landmark_model" + +# Collection of detected/predicted faces, each represented as a list of 468 face +# landmarks. (std::vector) +# NOTE: there will not be an output packet in the LANDMARKS stream for this +# particular timestamp if none of faces detected. However, the MediaPipe +# framework will internally inform the downstream calculators of the absence of +# this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:multi_face_landmarks" + +# Extra outputs (for debugging, for instance). +# Detected faces. (std::vector) +output_stream: "DETECTIONS:face_detections" +# Regions of interest calculated based on landmarks. +# (std::vector) +output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" +# Regions of interest calculated based on face detections. +# (std::vector) +output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" + +# Determines if an input vector of NormalizedRect has a size greater than or +# equal to the provided num_faces. +node { + calculator: "NormalizedRectVectorHasMinSizeCalculator" + input_stream: "ITERABLE:prev_face_rects_from_landmarks" + input_side_packet: "num_faces" + output_stream: "prev_has_enough_faces" +} + +# Drops the incoming image if FaceLandmarkGpu was able to identify face presence +# in the previous image. Otherwise, passes the incoming image through to trigger +# a new round of face detection in FaceDetectionShortRangeGpu. +node { + calculator: "GateCalculator" + input_stream: "image" + input_stream: "DISALLOW:prev_has_enough_faces" + output_stream: "gated_image" + options: { + [mediapipe.GateCalculatorOptions.ext] { + empty_packets_as_allow: true + } + } +} + +# Detects faces. 
+node { + calculator: "FaceDetectionShortRangeSideModelGpu" + input_stream: "IMAGE:gated_image" + input_side_packet: "MODEL:face_detection_model" + output_stream: "DETECTIONS:all_face_detections" +} + +# Makes sure there are no more detections than the provided num_faces. +node { + calculator: "ClipDetectionVectorSizeCalculator" + input_stream: "all_face_detections" + output_stream: "face_detections" + input_side_packet: "num_faces" +} + +# Calculate size of the image. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:gated_image" + output_stream: "SIZE:gated_image_size" +} + +# Outputs each element of face_detections at a fake timestamp for the rest of +# the graph to process. Clones the image size packet for each face_detection at +# the fake timestamp. At the end of the loop, outputs the BATCH_END timestamp +# for downstream calculators to inform them that all elements in the vector have +# been processed. +node { + calculator: "BeginLoopDetectionCalculator" + input_stream: "ITERABLE:face_detections" + input_stream: "CLONE:gated_image_size" + output_stream: "ITEM:face_detection" + output_stream: "CLONE:detections_loop_image_size" + output_stream: "BATCH_END:detections_loop_end_timestamp" +} + +# Calculates region of interest based on face detections, so that can be used +# to detect landmarks. +node { + calculator: "FaceDetectionFrontDetectionToRoi" + input_stream: "DETECTION:face_detection" + input_stream: "IMAGE_SIZE:detections_loop_image_size" + output_stream: "ROI:face_rect_from_detection" +} + +# Collects a NormalizedRect for each face into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:face_rect_from_detection" + input_stream: "BATCH_END:detections_loop_end_timestamp" + output_stream: "ITERABLE:face_rects_from_detections" +} + +# Performs association between NormalizedRect vector elements from previous +# image and rects based on face detections from the current image. This +# calculator ensures that the output face_rects vector doesn't contain +# overlapping regions based on the specified min_similarity_threshold. +node { + calculator: "AssociationNormRectCalculator" + input_stream: "face_rects_from_detections" + input_stream: "prev_face_rects_from_landmarks" + output_stream: "face_rects" + options: { + [mediapipe.AssociationCalculatorOptions.ext] { + min_similarity_threshold: 0.5 + } + } +} + +# Calculate size of the image. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:image" + output_stream: "SIZE:image_size" +} + +# Outputs each element of face_rects at a fake timestamp for the rest of the +# graph to process. Clones image and image size packets for each +# single_face_rect at the fake timestamp. At the end of the loop, outputs the +# BATCH_END timestamp for downstream calculators to inform them that all +# elements in the vector have been processed. +node { + calculator: "BeginLoopNormalizedRectCalculator" + input_stream: "ITERABLE:face_rects" + input_stream: "CLONE:0:image" + input_stream: "CLONE:1:image_size" + output_stream: "ITEM:face_rect" + output_stream: "CLONE:0:landmarks_loop_image" + output_stream: "CLONE:1:landmarks_loop_image_size" + output_stream: "BATCH_END:landmarks_loop_end_timestamp" +} + +# Detects face landmarks within specified region of interest of the image. 
+node {
+  calculator: "FaceLandmarkSideModelGpu"
+  input_stream: "IMAGE:landmarks_loop_image"
+  input_stream: "ROI:face_rect"
+  input_side_packet: "MODEL:face_landmark_model"
+  output_stream: "LANDMARKS:face_landmarks"
+}
+
+# Calculates region of interest based on face landmarks, so that it can be
+# reused for the subsequent image.
+node {
+  calculator: "FaceLandmarkLandmarksToRoi"
+  input_stream: "LANDMARKS:face_landmarks"
+  input_stream: "IMAGE_SIZE:landmarks_loop_image_size"
+  output_stream: "ROI:face_rect_from_landmarks"
+}
+
+# Collects a set of landmarks for each face into a vector. Upon receiving the
+# BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END
+# timestamp.
+node {
+  calculator: "EndLoopNormalizedLandmarkListVectorCalculator"
+  input_stream: "ITEM:face_landmarks"
+  input_stream: "BATCH_END:landmarks_loop_end_timestamp"
+  output_stream: "ITERABLE:multi_face_landmarks"
+}
+
+# Collects a NormalizedRect for each face into a vector. Upon receiving the
+# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END
+# timestamp.
+node {
+  calculator: "EndLoopNormalizedRectCalculator"
+  input_stream: "ITEM:face_rect_from_landmarks"
+  input_stream: "BATCH_END:landmarks_loop_end_timestamp"
+  output_stream: "ITERABLE:face_rects_from_landmarks"
+}
+
+# Caches face rects calculated from landmarks, and upon the arrival of the next
+# input image, sends out the cached rects with timestamps replaced by that of
+# the input image, essentially generating a packet that carries the previous
+# face rects. Note that upon the arrival of the very first input image, a
+# timestamp bound update occurs to jump start the feedback loop.
+node {
+  calculator: "PreviousLoopbackCalculator"
+  input_stream: "MAIN:image"
+  input_stream: "LOOP:face_rects_from_landmarks"
+  input_stream_info: {
+    tag_index: "LOOP"
+    back_edge: true
+  }
+  output_stream: "PREV_LOOP:prev_face_rects_from_landmarks"
+}
diff --git a/mediapipe/modules/face_landmark/face_landmark_gpu.pbtxt b/mediapipe/modules/face_landmark/face_landmark_gpu.pbtxt
new file mode 100644
index 0000000..854ceaf
--- /dev/null
+++ b/mediapipe/modules/face_landmark/face_landmark_gpu.pbtxt
@@ -0,0 +1,185 @@
+# MediaPipe graph to detect/predict face landmarks. (GPU input, and inference is
+# executed on GPU.)
+#
+# It is required that "face_landmark.tflite" is available at
+# "mediapipe/modules/face_landmark/face_landmark.tflite"
+# path during execution if `with_attention` is not set or set to `false`.
+#
+# It is required that "face_landmark_with_attention.tflite" is available at
+# "mediapipe/modules/face_landmark/face_landmark_with_attention.tflite"
+# path during execution if `with_attention` is set to `true`.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "FaceLandmarkGpu"
+#     input_stream: "IMAGE:image"
+#     input_stream: "ROI:face_roi"
+#     input_side_packet: "WITH_ATTENTION:with_attention"
+#     output_stream: "LANDMARKS:face_landmarks"
+#   }
+
+type: "FaceLandmarkGpu"
+
+# GPU image. (GpuBuffer)
+input_stream: "IMAGE:image"
+# ROI (region of interest) within the given image where a face is located.
+# (NormalizedRect)
+input_stream: "ROI:roi"
+# Whether to run face mesh model with attention on lips and eyes. (bool)
+# Attention provides more accuracy on lips and eye regions as well as iris
+# landmarks.
+input_side_packet: "WITH_ATTENTION:with_attention"
+
+# 468 or 478 facial landmarks within the given ROI. (NormalizedLandmarkList)
+#
+# Number of landmarks depends on the WITH_ATTENTION flag.
If it's `true` - then +# there will be 478 landmarks with refined lips, eyes and irises (10 extra +# landmarks are for irises), otherwise 468 non-refined landmarks are returned. +# +# NOTE: if a face is not present within the given ROI, for this particular +# timestamp there will not be an output packet in the LANDMARKS stream. However, +# the MediaPipe framework will internally inform the downstream calculators of +# the absence of this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:face_landmarks" + +# Transforms the input image into a 192x192 tensor. +node: { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE_GPU:image" + input_stream: "NORM_RECT:roi" + output_stream: "TENSORS:input_tensors" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 192 + output_tensor_height: 192 + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + gpu_origin: TOP_LEFT + } + } +} + +# Loads the face landmarks TF Lite model. +node { + calculator: "FaceLandmarksModelLoader" + input_side_packet: "WITH_ATTENTION:with_attention" + output_side_packet: "MODEL:model" +} + +# Generates a single side packet containing a TensorFlow Lite op resolver that +# supports custom ops needed by the model used in this graph. +node { + calculator: "TfLiteCustomOpResolverCalculator" + output_side_packet: "op_resolver" +} + +# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a +# vector of GPU tensors representing, for instance, detection boxes/keypoints +# and scores. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:input_tensors" + input_side_packet: "MODEL:model" + input_side_packet: "CUSTOM_OP_RESOLVER:op_resolver" + output_stream: "TENSORS:output_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + # Do not remove. Used for generation of XNNPACK/NNAPI graphs. + } + } +} + +# Splits a vector of tensors into landmark tensors and face flag tensor. +node { + calculator: "SwitchContainer" + input_side_packet: "ENABLE:with_attention" + input_stream: "output_tensors" + output_stream: "landmark_tensors" + output_stream: "face_flag_tensor" + options { + [mediapipe.SwitchContainerOptions.ext] { + contained_node: { + calculator: "SplitTensorVectorCalculator" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 1 } + ranges: { begin: 1 end: 2 } + } + } + } + contained_node: { + calculator: "SplitTensorVectorCalculator" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 6 } + ranges: { begin: 6 end: 7 } + } + } + } + } + } +} + +# Converts the face-flag tensor into a float that represents the confidence +# score of face presence. +node { + calculator: "TensorsToFloatsCalculator" + input_stream: "TENSORS:face_flag_tensor" + output_stream: "FLOAT:face_presence_score" + options: { + [mediapipe.TensorsToFloatsCalculatorOptions.ext] { + activation: SIGMOID + } + } +} + +# Applies a threshold to the confidence score to determine whether a face is +# present. +node { + calculator: "ThresholdingCalculator" + input_stream: "FLOAT:face_presence_score" + output_stream: "FLAG:face_presence" + options: { + [mediapipe.ThresholdingCalculatorOptions.ext] { + threshold: 0.5 + } + } +} + +# Drop landmarks tensors if face is not present. 
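+# The gate below only opens while face_presence is true, so the tensor-decoding
+# nodes that follow receive no packets (and do no work) for frames whose
+# presence score fell under the threshold above.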
+node { + calculator: "GateCalculator" + input_stream: "landmark_tensors" + input_stream: "ALLOW:face_presence" + output_stream: "ensured_landmark_tensors" +} + +# Decodes the landmark tensors into a vector of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. +node { + calculator: "SwitchContainer" + input_side_packet: "ENABLE:with_attention" + input_stream: "TENSORS:ensured_landmark_tensors" + output_stream: "LANDMARKS:landmarks" + options: { + [mediapipe.SwitchContainerOptions.ext] { + contained_node: { + calculator: "TensorsToFaceLandmarks" + } + contained_node: { + calculator: "TensorsToFaceLandmarksWithAttention" + } + } + } +} + +# Projects the landmarks from the cropped face image to the corresponding +# locations on the full image before cropping (input to the graph). +node { + calculator: "LandmarkProjectionCalculator" + input_stream: "NORM_LANDMARKS:landmarks" + input_stream: "NORM_RECT:roi" + output_stream: "NORM_LANDMARKS:face_landmarks" +} diff --git a/mediapipe/modules/face_landmark/face_landmark_landmarks_to_roi.pbtxt b/mediapipe/modules/face_landmark/face_landmark_landmarks_to_roi.pbtxt new file mode 100644 index 0000000..9f634b0 --- /dev/null +++ b/mediapipe/modules/face_landmark/face_landmark_landmarks_to_roi.pbtxt @@ -0,0 +1,54 @@ +# MediaPipe graph to calculate face region of interest (ROI) from landmarks +# detected by "FaceLandmarkCpu" or "FaceLandmarkGpu". +# +# NOTE: this graph is subject to change and should not be used directly. + +type: "FaceLandmarkLandmarksToRoi" + +# Normalized landmarks. (NormalizedLandmarkList) +input_stream: "LANDMARKS:landmarks" +# Frame size (width & height). (std::pair) +input_stream: "IMAGE_SIZE:image_size" +# ROI according to landmarks. (NormalizedRect) +output_stream: "ROI:roi" + +# Converts face landmarks to a detection that tightly encloses all landmarks. +node { + calculator: "LandmarksToDetectionCalculator" + input_stream: "NORM_LANDMARKS:landmarks" + output_stream: "DETECTION:face_detection" +} + +# Converts the face detection into a rectangle (normalized by image size) +# that encloses the face and is rotated such that the line connecting left side +# of the left eye and right side of the right eye is aligned with the X-axis of +# the rectangle. +node { + calculator: "DetectionsToRectsCalculator" + input_stream: "DETECTION:face_detection" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "NORM_RECT:face_rect_from_landmarks" + options: { + [mediapipe.DetectionsToRectsCalculatorOptions.ext] { + rotation_vector_start_keypoint_index: 33 # Left side of left eye. + rotation_vector_end_keypoint_index: 263 # Right side of right eye. + rotation_vector_target_angle_degrees: 0 + } + } +} + +# Expands the face rectangle so that in the next video image it's likely to +# still contain the face even with some motion. 
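+# As a rough worked example (illustrative numbers, ignoring rotation): a
+# 100x120 px landmark rect is first scaled to 150x180 px by the scale_x/scale_y
+# settings below, and square_long then pads the short side so the final ROI is
+# 180x180 px around the same center.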
+node {
+  calculator: "RectTransformationCalculator"
+  input_stream: "NORM_RECT:face_rect_from_landmarks"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "roi"
+  options: {
+    [mediapipe.RectTransformationCalculatorOptions.ext] {
+      scale_x: 1.5
+      scale_y: 1.5
+      square_long: true
+    }
+  }
+}
diff --git a/mediapipe/modules/face_landmark/face_landmark_with_attention.tflite b/mediapipe/modules/face_landmark/face_landmark_with_attention.tflite
new file mode 100755
index 0000000..fe0a93a
Binary files /dev/null and b/mediapipe/modules/face_landmark/face_landmark_with_attention.tflite differ
diff --git a/mediapipe/modules/face_landmark/face_landmarks_model_loader.pbtxt b/mediapipe/modules/face_landmark/face_landmarks_model_loader.pbtxt
new file mode 100644
index 0000000..ecac1a6
--- /dev/null
+++ b/mediapipe/modules/face_landmark/face_landmarks_model_loader.pbtxt
@@ -0,0 +1,58 @@
+# MediaPipe graph to load a selected face landmarks TF Lite model.
+
+type: "FaceLandmarksModelLoader"
+
+# Whether to run face mesh model with attention on lips and eyes. (bool)
+# Attention provides more accuracy on lips and eye regions as well as iris
+# landmarks.
+input_side_packet: "WITH_ATTENTION:with_attention"
+
+# TF Lite model represented as a FlatBuffer.
+# (std::unique_ptr<tflite::FlatBufferModel,
+#                  std::function<void(tflite::FlatBufferModel*)>>)
+output_side_packet: "MODEL:model"
+
+# Determines the path to the desired face landmark model file based on the
+# specification in the input side packet.
+node {
+  calculator: "SwitchContainer"
+  input_side_packet: "ENABLE:with_attention"
+  output_side_packet: "PACKET:model_path"
+  options: {
+    [mediapipe.SwitchContainerOptions.ext] {
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/face_landmark/face_landmark.tflite"
+            }
+          }
+        }
+      }
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/face_landmark/face_landmark_with_attention.tflite"
+            }
+          }
+        }
+      }
+    }
+  }
+}
+
+# Loads the file in the specified path into a blob.
+node {
+  calculator: "LocalFileContentsCalculator"
+  input_side_packet: "FILE_PATH:model_path"
+  output_side_packet: "CONTENTS:model_blob"
+}
+
+# Converts the input blob into a TF Lite model.
+node {
+  calculator: "TfLiteModelCalculator"
+  input_side_packet: "MODEL_BLOB:model_blob"
+  output_side_packet: "MODEL:model"
+}
diff --git a/mediapipe/modules/face_landmark/tensors_to_face_landmarks.pbtxt b/mediapipe/modules/face_landmark/tensors_to_face_landmarks.pbtxt
new file mode 100644
index 0000000..0adbdf3
--- /dev/null
+++ b/mediapipe/modules/face_landmark/tensors_to_face_landmarks.pbtxt
@@ -0,0 +1,24 @@
+# MediaPipe graph to transform a single tensor into 468 facial landmarks.
+
+type: "TensorsToFaceLandmarks"
+
+# Vector with a single tensor that contains 468 landmarks. (std::vector<Tensor>)
+input_stream: "TENSORS:tensors"
+
+# 468 facial landmarks. (NormalizedLandmarkList)
+output_stream: "LANDMARKS:landmarks"
+
+# Decodes the landmark tensors into a vector of landmarks, where the landmark
+# coordinates are normalized by the size of the input image to the model.
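+# With the 192x192 model input configured below, a raw tensor coordinate of 96
+# decodes to a normalized value of 96 / 192 = 0.5 (illustrative numbers).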
+node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:tensors" + output_stream: "NORM_LANDMARKS:landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 468 + input_image_width: 192 + input_image_height: 192 + } + } +} diff --git a/mediapipe/modules/face_landmark/tensors_to_face_landmarks_with_attention.pbtxt b/mediapipe/modules/face_landmark/tensors_to_face_landmarks_with_attention.pbtxt new file mode 100644 index 0000000..4f9b994 --- /dev/null +++ b/mediapipe/modules/face_landmark/tensors_to_face_landmarks_with_attention.pbtxt @@ -0,0 +1,299 @@ +# MediaPipe graph to transform model output tensors into 478 facial landmarks +# with refined lips, eyes and irises. + +type: "TensorsToFaceLandmarksWithAttention" + +# Vector with a six tensors to parse landmarks from. (std::vector) +# Landmark tensors order: +# - mesh_tensor +# - lips_tensor +# - left_eye_tensor +# - right_eye_tensor +# - left_iris_tensor +# - right_iris_tensor +input_stream: "TENSORS:tensors" + +# 478 facial landmarks (NormalizedLandmarkList) +output_stream: "LANDMARKS:landmarks" + +# Splits a vector of tensors into multiple vectors. +node { + calculator: "SplitTensorVectorCalculator" + input_stream: "tensors" + output_stream: "mesh_tensor" + output_stream: "lips_tensor" + output_stream: "left_eye_tensor" + output_stream: "right_eye_tensor" + output_stream: "left_iris_tensor" + output_stream: "right_iris_tensor" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 1 } + ranges: { begin: 1 end: 2 } + ranges: { begin: 2 end: 3 } + ranges: { begin: 3 end: 4 } + ranges: { begin: 4 end: 5 } + ranges: { begin: 5 end: 6 } + } + } +} + +# Decodes mesh landmarks tensor into a vector of normalized lanmarks. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:mesh_tensor" + output_stream: "NORM_LANDMARKS:mesh_landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 468 + input_image_width: 192 + input_image_height: 192 + } + } +} + +# Decodes lips landmarks tensor into a vector of normalized lanmarks. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:lips_tensor" + output_stream: "NORM_LANDMARKS:lips_landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 80 + input_image_width: 192 + input_image_height: 192 + } + } +} + +# Decodes left eye landmarks tensor into a vector of normalized lanmarks. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:left_eye_tensor" + output_stream: "NORM_LANDMARKS:left_eye_landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 71 + input_image_width: 192 + input_image_height: 192 + } + } +} + +# Decodes right eye landmarks tensor into a vector of normalized lanmarks. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:right_eye_tensor" + output_stream: "NORM_LANDMARKS:right_eye_landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 71 + input_image_width: 192 + input_image_height: 192 + } + } +} + +# Decodes left iris landmarks tensor into a vector of normalized lanmarks. 
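+# Each iris tensor decodes to 5 keypoints (center plus right/top/left/bottom
+# edges); the refinement step at the end of this graph appends them as
+# landmarks 468-477.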
+node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:left_iris_tensor" + output_stream: "NORM_LANDMARKS:left_iris_landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 5 + input_image_width: 192 + input_image_height: 192 + } + } +} + +# Decodes right iris landmarks tensor into a vector of normalized lanmarks. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:right_iris_tensor" + output_stream: "NORM_LANDMARKS:right_iris_landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 5 + input_image_width: 192 + input_image_height: 192 + } + } +} + +# Refine mesh landmarks with lips, eyes and irises. +node { + calculator: "LandmarksRefinementCalculator" + input_stream: "LANDMARKS:0:mesh_landmarks" + input_stream: "LANDMARKS:1:lips_landmarks" + input_stream: "LANDMARKS:2:left_eye_landmarks" + input_stream: "LANDMARKS:3:right_eye_landmarks" + input_stream: "LANDMARKS:4:left_iris_landmarks" + input_stream: "LANDMARKS:5:right_iris_landmarks" + output_stream: "REFINED_LANDMARKS:landmarks" + options: { + [mediapipe.LandmarksRefinementCalculatorOptions.ext] { + # 0 - mesh + refinement: { + indexes_mapping: [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, + 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, + 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, + 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, + 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, + 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, + 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, + 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, + 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, + 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, + 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, + 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, + 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, + 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, + 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, + 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, + 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, + 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, + 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, + 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, + 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, + 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, + 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, + 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, + 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, + 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, + 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, + 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, + 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467 + ] + 
z_refinement: { copy {} } + } + # 1 - lips + refinement: { + indexes_mapping: [ + # Lower outer. + 61, 146, 91, 181, 84, 17, 314, 405, 321, 375, 291, + # Upper outer (excluding corners). + 185, 40, 39, 37, 0, 267, 269, 270, 409, + # Lower inner. + 78, 95, 88, 178, 87, 14, 317, 402, 318, 324, 308, + # Upper inner (excluding corners). + 191, 80, 81, 82, 13, 312, 311, 310, 415, + # Lower semi-outer. + 76, 77, 90, 180, 85, 16, 315, 404, 320, 307, 306, + # Upper semi-outer (excluding corners). + 184, 74, 73, 72, 11, 302, 303, 304, 408, + # Lower semi-inner. + 62, 96, 89, 179, 86, 15, 316, 403, 319, 325, 292, + # Upper semi-inner (excluding corners). + 183, 42, 41, 38, 12, 268, 271, 272, 407 + ] + z_refinement: { none {} } + } + # 2 - left eye + refinement: { + indexes_mapping: [ + # Lower contour. + 33, 7, 163, 144, 145, 153, 154, 155, 133, + # upper contour (excluding corners). + 246, 161, 160, 159, 158, 157, 173, + # Halo x2 lower contour. + 130, 25, 110, 24, 23, 22, 26, 112, 243, + # Halo x2 upper contour (excluding corners). + 247, 30, 29, 27, 28, 56, 190, + # Halo x3 lower contour. + 226, 31, 228, 229, 230, 231, 232, 233, 244, + # Halo x3 upper contour (excluding corners). + 113, 225, 224, 223, 222, 221, 189, + # Halo x4 upper contour (no lower because of mesh structure) or + # eyebrow inner contour. + 35, 124, 46, 53, 52, 65, + # Halo x5 lower contour. + 143, 111, 117, 118, 119, 120, 121, 128, 245, + # Halo x5 upper contour (excluding corners) or eyebrow outer contour. + 156, 70, 63, 105, 66, 107, 55, 193 + ] + z_refinement: { none {} } + } + # 3 - right eye + refinement: { + indexes_mapping: [ + # Lower contour. + 263, 249, 390, 373, 374, 380, 381, 382, 362, + # Upper contour (excluding corners). + 466, 388, 387, 386, 385, 384, 398, + # Halo x2 lower contour. + 359, 255, 339, 254, 253, 252, 256, 341, 463, + # Halo x2 upper contour (excluding corners). + 467, 260, 259, 257, 258, 286, 414, + # Halo x3 lower contour. + 446, 261, 448, 449, 450, 451, 452, 453, 464, + # Halo x3 upper contour (excluding corners). + 342, 445, 444, 443, 442, 441, 413, + # Halo x4 upper contour (no lower because of mesh structure) or + # eyebrow inner contour. + 265, 353, 276, 283, 282, 295, + # Halo x5 lower contour. + 372, 340, 346, 347, 348, 349, 350, 357, 465, + # Halo x5 upper contour (excluding corners) or eyebrow outer contour. + 383, 300, 293, 334, 296, 336, 285, 417 + ] + z_refinement: { none {} } + } + # 4 - left iris + refinement: { + indexes_mapping: [ + # Center. + 468, + # Iris right edge. + 469, + # Iris top edge. + 470, + # Iris left edge. + 471, + # Iris bottom edge. + 472 + ] + z_refinement: { + assign_average: { + indexes_for_average: [ + # Lower contour. + 33, 7, 163, 144, 145, 153, 154, 155, 133, + # Upper contour (excluding corners). + 246, 161, 160, 159, 158, 157, 173 + ] + } + } + } + # 5 - right iris + refinement: { + indexes_mapping: [ + # Center. + 473, + # Iris right edge. + 474, + # Iris top edge. + 475, + # Iris left edge. + 476, + # Iris bottom edge. + 477 + ] + z_refinement: { + assign_average: { + indexes_for_average: [ + # Lower contour. + 263, 249, 390, 373, 374, 380, 381, 382, 362, + # Upper contour (excluding corners). + 466, 388, 387, 386, 385, 384, 398 + ] + } + } + } + } + } +} diff --git a/mediapipe/modules/hand_landmark/BUILD b/mediapipe/modules/hand_landmark/BUILD new file mode 100644 index 0000000..b28dc78 --- /dev/null +++ b/mediapipe/modules/hand_landmark/BUILD @@ -0,0 +1,171 @@ +# Copyright 2020 The MediaPipe Authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +exports_files([ + "hand_landmark_full.tflite", + "hand_landmark_lite.tflite", + "handedness.txt", +]) + +mediapipe_simple_subgraph( + name = "hand_landmark_model_loader", + graph = "hand_landmark_model_loader.pbtxt", + register_as = "HandLandmarkModelLoader", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/tflite:tflite_model_calculator", + "//mediapipe/calculators/util:local_file_contents_calculator", + "//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmark_cpu", + graph = "hand_landmark_cpu.pbtxt", + register_as = "HandLandmarkCpu", + deps = [ + ":hand_landmark_model_loader", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_classification_calculator", + "//mediapipe/calculators/tensor:tensors_to_floats_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:thresholding_calculator", + "//mediapipe/calculators/util:world_landmark_projection_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmark_gpu", + graph = "hand_landmark_gpu.pbtxt", + register_as = "HandLandmarkGpu", + deps = [ + ":hand_landmark_model_loader", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_classification_calculator", + "//mediapipe/calculators/tensor:tensors_to_floats_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:thresholding_calculator", + "//mediapipe/calculators/util:world_landmark_projection_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmark_tracking_gpu", + graph = "hand_landmark_tracking_gpu.pbtxt", + register_as = "HandLandmarkTrackingGpu", + deps = [ + ":hand_landmark_gpu", + ":hand_landmark_landmarks_to_roi", + ":palm_detection_detection_to_roi", + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:clip_vector_size_calculator", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + 
"//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/util:association_norm_rect_calculator", + "//mediapipe/calculators/util:collection_has_min_size_calculator", + "//mediapipe/calculators/util:filter_collection_calculator", + "//mediapipe/modules/palm_detection:palm_detection_gpu", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmark_tracking_cpu_image", + graph = "hand_landmark_tracking_cpu_image.pbtxt", + register_as = "HandLandmarkTrackingCpuImage", + deps = [ + ":hand_landmark_tracking_cpu", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/util:from_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmark_tracking_gpu_image", + graph = "hand_landmark_tracking_gpu_image.pbtxt", + register_as = "HandLandmarkTrackingGpuImage", + deps = [ + ":hand_landmark_tracking_gpu", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/util:from_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmark_tracking_cpu", + graph = "hand_landmark_tracking_cpu.pbtxt", + register_as = "HandLandmarkTrackingCpu", + deps = [ + ":hand_landmark_cpu", + ":hand_landmark_landmarks_to_roi", + ":palm_detection_detection_to_roi", + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:clip_vector_size_calculator", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/util:association_norm_rect_calculator", + "//mediapipe/calculators/util:collection_has_min_size_calculator", + "//mediapipe/calculators/util:filter_collection_calculator", + "//mediapipe/modules/palm_detection:palm_detection_cpu", + ], +) + +mediapipe_simple_subgraph( + name = "palm_detection_detection_to_roi", + graph = "palm_detection_detection_to_roi.pbtxt", + register_as = "PalmDetectionDetectionToRoi", + deps = [ + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmark_landmarks_to_roi", + graph = "hand_landmark_landmarks_to_roi.pbtxt", + register_as = "HandLandmarkLandmarksToRoi", + deps = [ + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + "//mediapipe/modules/hand_landmark/calculators:hand_landmarks_to_rect_calculator", + ], +) diff --git a/mediapipe/modules/hand_landmark/README.md b/mediapipe/modules/hand_landmark/README.md new file mode 100644 index 0000000..31fe6f7 --- /dev/null +++ b/mediapipe/modules/hand_landmark/README.md @@ -0,0 +1,8 @@ +# hand_landmark + +Subgraphs|Details +:--- | :--- +[`HandLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_cpu.pbtxt)| Detects landmarks of a single hand. (CPU input.) 
+[`HandLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_gpu.pbtxt)| Detects landmarks of a single hand. (GPU input.) +[`HandLandmarkTrackingCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_cpu.pbtxt)| Detects and tracks landmarks of multiple hands. (CPU input.) +[`HandLandmarkTrackingGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt)| Detects and tracks landmarks of multiple hands. (GPU input.) diff --git a/mediapipe/modules/hand_landmark/calculators/BUILD b/mediapipe/modules/hand_landmark/calculators/BUILD new file mode 100644 index 0000000..b2a8efe --- /dev/null +++ b/mediapipe/modules/hand_landmark/calculators/BUILD @@ -0,0 +1,33 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "hand_landmarks_to_rect_calculator", + srcs = ["hand_landmarks_to_rect_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_options_cc_proto", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:location_data_cc_proto", + "//mediapipe/framework/formats:rect_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + ], + alwayslink = 1, +) diff --git a/mediapipe/modules/hand_landmark/calculators/hand_landmarks_to_rect_calculator.cc b/mediapipe/modules/hand_landmark/calculators/hand_landmarks_to_rect_calculator.cc new file mode 100644 index 0000000..3e3f5c8 --- /dev/null +++ b/mediapipe/modules/hand_landmark/calculators/hand_landmarks_to_rect_calculator.cc @@ -0,0 +1,167 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+#include <cmath>
+
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/calculator_options.pb.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/formats/rect.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+
+namespace mediapipe {
+
+namespace {
+
+constexpr char kNormalizedLandmarksTag[] = "NORM_LANDMARKS";
+constexpr char kNormRectTag[] = "NORM_RECT";
+constexpr char kImageSizeTag[] = "IMAGE_SIZE";
+constexpr int kWristJoint = 0;
+constexpr int kMiddleFingerPIPJoint = 6;
+constexpr int kIndexFingerPIPJoint = 4;
+constexpr int kRingFingerPIPJoint = 8;
+constexpr float kTargetAngle = M_PI * 0.5f;
+
+inline float NormalizeRadians(float angle) {
+  return angle - 2 * M_PI * std::floor((angle - (-M_PI)) / (2 * M_PI));
+}
+
+float ComputeRotation(const NormalizedLandmarkList& landmarks,
+                      const std::pair<int, int>& image_size) {
+  const float x0 = landmarks.landmark(kWristJoint).x() * image_size.first;
+  const float y0 = landmarks.landmark(kWristJoint).y() * image_size.second;
+
+  float x1 = (landmarks.landmark(kIndexFingerPIPJoint).x() +
+              landmarks.landmark(kRingFingerPIPJoint).x()) /
+             2.f;
+  float y1 = (landmarks.landmark(kIndexFingerPIPJoint).y() +
+              landmarks.landmark(kRingFingerPIPJoint).y()) /
+             2.f;
+  x1 = (x1 + landmarks.landmark(kMiddleFingerPIPJoint).x()) / 2.f *
+       image_size.first;
+  y1 = (y1 + landmarks.landmark(kMiddleFingerPIPJoint).y()) / 2.f *
+       image_size.second;
+
+  const float rotation =
+      NormalizeRadians(kTargetAngle - std::atan2(-(y1 - y0), x1 - x0));
+  return rotation;
+}
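+
+// NormalizeRadians above maps any angle into [-pi, pi); e.g. an input of
+// 3*pi comes out as -pi. The conversion below rotates the landmarks by the
+// inverse of ComputeRotation's angle, takes an axis-aligned bounding box in
+// that canonical frame, then rotates the box center back and reports it
+// normalized by the image size.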
+
+absl::Status NormalizedLandmarkListToRect(
+    const NormalizedLandmarkList& landmarks,
+    const std::pair<int, int>& image_size, NormalizedRect* rect) {
+  const float rotation = ComputeRotation(landmarks, image_size);
+  const float reverse_angle = NormalizeRadians(-rotation);
+
+  // Find boundaries of landmarks.
+  float max_x = std::numeric_limits<float>::min();
+  float max_y = std::numeric_limits<float>::min();
+  float min_x = std::numeric_limits<float>::max();
+  float min_y = std::numeric_limits<float>::max();
+  for (int i = 0; i < landmarks.landmark_size(); ++i) {
+    max_x = std::max(max_x, landmarks.landmark(i).x());
+    max_y = std::max(max_y, landmarks.landmark(i).y());
+    min_x = std::min(min_x, landmarks.landmark(i).x());
+    min_y = std::min(min_y, landmarks.landmark(i).y());
+  }
+  const float axis_aligned_center_x = (max_x + min_x) / 2.f;
+  const float axis_aligned_center_y = (max_y + min_y) / 2.f;
+
+  // Find boundaries of rotated landmarks.
+  max_x = std::numeric_limits<float>::min();
+  max_y = std::numeric_limits<float>::min();
+  min_x = std::numeric_limits<float>::max();
+  min_y = std::numeric_limits<float>::max();
+  for (int i = 0; i < landmarks.landmark_size(); ++i) {
+    const float original_x =
+        (landmarks.landmark(i).x() - axis_aligned_center_x) * image_size.first;
+    const float original_y =
+        (landmarks.landmark(i).y() - axis_aligned_center_y) *
+        image_size.second;
+
+    const float projected_x = original_x * std::cos(reverse_angle) -
+                              original_y * std::sin(reverse_angle);
+    const float projected_y = original_x * std::sin(reverse_angle) +
+                              original_y * std::cos(reverse_angle);
+
+    max_x = std::max(max_x, projected_x);
+    max_y = std::max(max_y, projected_y);
+    min_x = std::min(min_x, projected_x);
+    min_y = std::min(min_y, projected_y);
+  }
+  const float projected_center_x = (max_x + min_x) / 2.f;
+  const float projected_center_y = (max_y + min_y) / 2.f;
+
+  const float center_x = projected_center_x * std::cos(rotation) -
+                         projected_center_y * std::sin(rotation) +
+                         image_size.first * axis_aligned_center_x;
+  const float center_y = projected_center_x * std::sin(rotation) +
+                         projected_center_y * std::cos(rotation) +
+                         image_size.second * axis_aligned_center_y;
+  const float width = (max_x - min_x) / image_size.first;
+  const float height = (max_y - min_y) / image_size.second;
+
+  rect->set_x_center(center_x / image_size.first);
+  rect->set_y_center(center_y / image_size.second);
+  rect->set_width(width);
+  rect->set_height(height);
+  rect->set_rotation(rotation);
+
+  return absl::OkStatus();
+}
+
+}  // namespace
+
+// A calculator that converts a subset of hand landmarks to a bounding box
+// NormalizedRect. The rotation angle of the bounding box is computed based on
+// 1) the wrist joint and 2) the average of PIP joints of index finger, middle
+// finger and ring finger. After rotation, the vector from the wrist to the mean
+// of PIP joints is expected to be vertical with wrist at the bottom and the
+// mean of PIP joints at the top.
+class HandLandmarksToRectCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc) {
+    cc->Inputs().Tag(kNormalizedLandmarksTag).Set<NormalizedLandmarkList>();
+    cc->Inputs().Tag(kImageSizeTag).Set<std::pair<int, int>>();
+    cc->Outputs().Tag(kNormRectTag).Set<NormalizedRect>();
+    return absl::OkStatus();
+  }
+
+  absl::Status Open(CalculatorContext* cc) override {
+    cc->SetOffset(TimestampDiff(0));
+    return absl::OkStatus();
+  }
+
+  absl::Status Process(CalculatorContext* cc) override {
+    if (cc->Inputs().Tag(kNormalizedLandmarksTag).IsEmpty()) {
+      return absl::OkStatus();
+    }
+    RET_CHECK(!cc->Inputs().Tag(kImageSizeTag).IsEmpty());
+
+    std::pair<int, int> image_size =
+        cc->Inputs().Tag(kImageSizeTag).Get<std::pair<int, int>>();
+    const auto& landmarks =
+        cc->Inputs().Tag(kNormalizedLandmarksTag).Get<NormalizedLandmarkList>();
+    auto output_rect = absl::make_unique<NormalizedRect>();
+    MP_RETURN_IF_ERROR(
+        NormalizedLandmarkListToRect(landmarks, image_size, output_rect.get()));
+    cc->Outputs()
+        .Tag(kNormRectTag)
+        .Add(output_rect.release(), cc->InputTimestamp());
+
+    return absl::OkStatus();
+  }
+};
+REGISTER_CALCULATOR(HandLandmarksToRectCalculator);
+
+}  // namespace mediapipe
diff --git a/mediapipe/modules/hand_landmark/hand_landmark_cpu.pbtxt b/mediapipe/modules/hand_landmark/hand_landmark_cpu.pbtxt
new file mode 100644
index 0000000..6ecbfad
--- /dev/null
+++ b/mediapipe/modules/hand_landmark/hand_landmark_cpu.pbtxt
@@ -0,0 +1,219 @@
+# MediaPipe graph to detect/predict hand landmarks on CPU.
+
+type: "HandLandmarkCpu"
+
+# CPU image.
(ImageFrame) +input_stream: "IMAGE:image" +# ROI (region of interest) within the given image where a palm/hand is located. +# (NormalizedRect) +input_stream: "ROI:hand_rect" + +# Complexity of the hand landmark model: 0 or 1. Landmark accuracy as well as +# inference latency generally go up with the model complexity. If unspecified, +# functions as set to 1. (int) +input_side_packet: "MODEL_COMPLEXITY:model_complexity" + +# 21 hand landmarks within the given ROI. (NormalizedLandmarkList) +# NOTE: if a hand is not present within the given ROI, for this particular +# timestamp there will not be an output packet in the LANDMARKS stream. However, +# the MediaPipe framework will internally inform the downstream calculators of +# the absence of this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:hand_landmarks" + +# Hand world landmarks within the given ROI. (LandmarkList) +# World landmarks are real-world 3D coordinates in meters with the origin in the +# center of the given ROI. +# +# WORLD_LANDMARKS shares the same landmark topology as LANDMARKS. However, +# LANDMARKS provides coordinates (in pixels) of a 3D object projected onto the +# 2D image surface, while WORLD_LANDMARKS provides coordinates (in meters) of +# the 3D object itself. +output_stream: "WORLD_LANDMARKS:hand_world_landmarks" + +# Handedness of the detected hand (i.e. is hand left or right). +# (ClassificationList) +output_stream: "HANDEDNESS:handedness" + +# Transforms a region of image into a 224x224 tensor while keeping the aspect +# ratio, and therefore may result in potential letterboxing. +node { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE:image" + input_stream: "NORM_RECT:hand_rect" + output_stream: "TENSORS:input_tensor" + output_stream: "LETTERBOX_PADDING:letterbox_padding" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 224 + output_tensor_height: 224 + keep_aspect_ratio: true + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + } + } +} + +# Loads the hand landmark TF Lite model. +node { + calculator: "HandLandmarkModelLoader" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + output_side_packet: "MODEL:model" +} + +# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "InferenceCalculator" + input_side_packet: "MODEL:model" + input_stream: "TENSORS:input_tensor" + output_stream: "TENSORS:output_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + delegate { + xnnpack {} + } + } + } +} + +# Splits a vector of tensors to multiple vectors according to the ranges +# specified in option. +node { + calculator: "SplitTensorVectorCalculator" + input_stream: "output_tensors" + output_stream: "landmark_tensors" + output_stream: "hand_flag_tensor" + output_stream: "handedness_tensor" + output_stream: "world_landmark_tensor" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 1 } + ranges: { begin: 1 end: 2 } + ranges: { begin: 2 end: 3 } + ranges: { begin: 3 end: 4 } + } + } +} + +# Converts the hand-flag tensor into a float that represents the confidence +# score of hand presence. +node { + calculator: "TensorsToFloatsCalculator" + input_stream: "TENSORS:hand_flag_tensor" + output_stream: "FLOAT:hand_presence_score" +} + +# Applies a threshold to the confidence score to determine whether a hand is +# present. 
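+# (With the threshold of 0.5 configured below, e.g. a hand presence score of
+# 0.62 sets hand_presence to true, which in turn opens the gates that let the
+# handedness and landmark tensors through.)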
+node { + calculator: "ThresholdingCalculator" + input_stream: "FLOAT:hand_presence_score" + output_stream: "FLAG:hand_presence" + options: { + [mediapipe.ThresholdingCalculatorOptions.ext] { + threshold: 0.5 + } + } +} + +# Drops handedness tensor if hand is not present. +node { + calculator: "GateCalculator" + input_stream: "handedness_tensor" + input_stream: "ALLOW:hand_presence" + output_stream: "ensured_handedness_tensor" +} + +# Converts the handedness tensor into a float that represents the classification +# score of handedness. +node { + calculator: "TensorsToClassificationCalculator" + input_stream: "TENSORS:ensured_handedness_tensor" + output_stream: "CLASSIFICATIONS:handedness" + options: { + [mediapipe.TensorsToClassificationCalculatorOptions.ext] { + top_k: 1 + label_map_path: "mediapipe/modules/hand_landmark/handedness.txt" + binary_classification: true + } + } +} + +# Drops landmarks tensors if hand is not present. +node { + calculator: "GateCalculator" + input_stream: "landmark_tensors" + input_stream: "ALLOW:hand_presence" + output_stream: "ensured_landmark_tensors" +} + +# Decodes the landmark tensors into a list of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:ensured_landmark_tensors" + output_stream: "NORM_LANDMARKS:landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 21 + input_image_width: 224 + input_image_height: 224 + # The additional scaling factor is used to account for the Z coordinate + # distribution in the training data. + normalize_z: 0.4 + } + } +} + +# Adjusts landmarks (already normalized to [0.f, 1.f]) on the letterboxed hand +# image (after image transformation with the FIT scale mode) to the +# corresponding locations on the same image with the letterbox removed (hand +# image before image transformation). +node { + calculator: "LandmarkLetterboxRemovalCalculator" + input_stream: "LANDMARKS:landmarks" + input_stream: "LETTERBOX_PADDING:letterbox_padding" + output_stream: "LANDMARKS:scaled_landmarks" +} + +# Projects the landmarks from the cropped hand image to the corresponding +# locations on the full image before cropping (input to the graph). +node { + calculator: "LandmarkProjectionCalculator" + input_stream: "NORM_LANDMARKS:scaled_landmarks" + input_stream: "NORM_RECT:hand_rect" + output_stream: "NORM_LANDMARKS:hand_landmarks" +} + +# Drops world landmarks tensors if hand is not present. +node { + calculator: "GateCalculator" + input_stream: "world_landmark_tensor" + input_stream: "ALLOW:hand_presence" + output_stream: "ensured_world_landmark_tensor" +} + +# Decodes the landmark tensors into a list of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:ensured_world_landmark_tensor" + output_stream: "LANDMARKS:unprojected_world_landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 21 + } + } +} + +# Projects the world landmarks from the cropped hand image to the corresponding +# locations on the full image before cropping (input to the graph). 
+node {
+  calculator: "WorldLandmarkProjectionCalculator"
+  input_stream: "LANDMARKS:unprojected_world_landmarks"
+  input_stream: "NORM_RECT:hand_rect"
+  output_stream: "LANDMARKS:hand_world_landmarks"
+}
diff --git a/mediapipe/modules/hand_landmark/hand_landmark_full.tflite b/mediapipe/modules/hand_landmark/hand_landmark_full.tflite
new file mode 100755
index 0000000..a2b0114
Binary files /dev/null and b/mediapipe/modules/hand_landmark/hand_landmark_full.tflite differ
diff --git a/mediapipe/modules/hand_landmark/hand_landmark_gpu.pbtxt b/mediapipe/modules/hand_landmark/hand_landmark_gpu.pbtxt
new file mode 100644
index 0000000..033ad44
--- /dev/null
+++ b/mediapipe/modules/hand_landmark/hand_landmark_gpu.pbtxt
@@ -0,0 +1,213 @@
+# MediaPipe graph to detect/predict hand landmarks on GPU.
+
+type: "HandLandmarkGpu"
+
+# GPU image. (GpuBuffer)
+input_stream: "IMAGE:image"
+# ROI (region of interest) within the given image where a palm/hand is located.
+# (NormalizedRect)
+input_stream: "ROI:hand_rect"
+
+# Complexity of the hand landmark model: 0 or 1. Landmark accuracy as well as
+# inference latency generally go up with the model complexity. If unspecified,
+# functions as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# 21 hand landmarks within the given ROI. (NormalizedLandmarkList)
+# NOTE: if a hand is not present within the given ROI, for this particular
+# timestamp there will not be an output packet in the LANDMARKS stream. However,
+# the MediaPipe framework will internally inform the downstream calculators of
+# the absence of this packet so that they don't wait for it unnecessarily.
+output_stream: "LANDMARKS:hand_landmarks"
+
+# Hand world landmarks within the given ROI. (LandmarkList)
+# World landmarks are real-world 3D coordinates in meters with the origin in the
+# center of the given ROI.
+#
+# WORLD_LANDMARKS shares the same landmark topology as LANDMARKS. However,
+# LANDMARKS provides coordinates (in pixels) of a 3D object projected onto the
+# 2D image surface, while WORLD_LANDMARKS provides coordinates (in meters) of
+# the 3D object itself.
+output_stream: "WORLD_LANDMARKS:hand_world_landmarks"
+
+# Handedness of the detected hand (i.e. is hand left or right).
+# (ClassificationList)
+output_stream: "HANDEDNESS:handedness"
+
+# Transforms a region of image into a 224x224 tensor while keeping the aspect
+# ratio, and therefore may result in potential letterboxing.
+node {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE_GPU:image"
+  input_stream: "NORM_RECT:hand_rect"
+  output_stream: "TENSORS:input_tensor"
+  output_stream: "LETTERBOX_PADDING:letterbox_padding"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 224
+      output_tensor_height: 224
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: 0.0
+        max: 1.0
+      }
+      gpu_origin: TOP_LEFT
+    }
+  }
+}
+
+# Loads the hand landmark TF Lite model.
+node {
+  calculator: "HandLandmarkModelLoader"
+  input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+  output_side_packet: "MODEL:model"
+}
+
+# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "InferenceCalculator"
+  input_side_packet: "MODEL:model"
+  input_stream: "TENSORS:input_tensor"
+  output_stream: "TENSORS:output_tensors"
+}
+
+# Splits a vector of tensors to multiple vectors according to the ranges
+# specified in option.
+node { + calculator: "SplitTensorVectorCalculator" + input_stream: "output_tensors" + output_stream: "landmark_tensors" + output_stream: "hand_flag_tensor" + output_stream: "handedness_tensor" + output_stream: "world_landmark_tensor" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 1 } + ranges: { begin: 1 end: 2 } + ranges: { begin: 2 end: 3 } + ranges: { begin: 3 end: 4 } + } + } +} + +# Converts the hand-flag tensor into a float that represents the confidence +# score of hand presence. +node { + calculator: "TensorsToFloatsCalculator" + input_stream: "TENSORS:hand_flag_tensor" + output_stream: "FLOAT:hand_presence_score" +} + +# Applies a threshold to the confidence score to determine whether a hand is +# present. +node { + calculator: "ThresholdingCalculator" + input_stream: "FLOAT:hand_presence_score" + output_stream: "FLAG:hand_presence" + options: { + [mediapipe.ThresholdingCalculatorOptions.ext] { + threshold: 0.5 + } + } +} + +# Drops handedness tensor if hand is not present. +node { + calculator: "GateCalculator" + input_stream: "handedness_tensor" + input_stream: "ALLOW:hand_presence" + output_stream: "ensured_handedness_tensor" +} + +# Converts the handedness tensor into a float that represents the classification +# score of handedness. +node { + calculator: "TensorsToClassificationCalculator" + input_stream: "TENSORS:ensured_handedness_tensor" + output_stream: "CLASSIFICATIONS:handedness" + options: { + [mediapipe.TensorsToClassificationCalculatorOptions.ext] { + top_k: 1 + label_map_path: "mediapipe/modules/hand_landmark/handedness.txt" + binary_classification: true + } + } +} + +# Drops landmarks tensors if hand is not present. +node { + calculator: "GateCalculator" + input_stream: "landmark_tensors" + input_stream: "ALLOW:hand_presence" + output_stream: "ensured_landmark_tensors" +} + +# Decodes the landmark tensors into a list of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:ensured_landmark_tensors" + output_stream: "NORM_LANDMARKS:landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 21 + input_image_width: 224 + input_image_height: 224 + # The additional scaling factor is used to account for the Z coordinate + # distribution in the training data. + normalize_z: 0.4 + } + } +} + +# Adjusts landmarks (already normalized to [0.f, 1.f]) on the letterboxed hand +# image (after image transformation with the FIT scale mode) to the +# corresponding locations on the same image with the letterbox removed (hand +# image before image transformation). +node { + calculator: "LandmarkLetterboxRemovalCalculator" + input_stream: "LANDMARKS:landmarks" + input_stream: "LETTERBOX_PADDING:letterbox_padding" + output_stream: "LANDMARKS:scaled_landmarks" +} + +# Projects the landmarks from the cropped hand image to the corresponding +# locations on the full image before cropping (input to the graph). +node { + calculator: "LandmarkProjectionCalculator" + input_stream: "NORM_LANDMARKS:scaled_landmarks" + input_stream: "NORM_RECT:hand_rect" + output_stream: "NORM_LANDMARKS:hand_landmarks" +} + +# Drops world landmarks tensors if hand is not present. 
+node {
+  calculator: "GateCalculator"
+  input_stream: "world_landmark_tensor"
+  input_stream: "ALLOW:hand_presence"
+  output_stream: "ensured_world_landmark_tensor"
+}
+
+# Decodes the landmark tensors into a list of landmarks, where the landmark
+# coordinates are normalized by the size of the input image to the model.
+node {
+  calculator: "TensorsToLandmarksCalculator"
+  input_stream: "TENSORS:ensured_world_landmark_tensor"
+  output_stream: "LANDMARKS:unprojected_world_landmarks"
+  options: {
+    [mediapipe.TensorsToLandmarksCalculatorOptions.ext] {
+      num_landmarks: 21
+    }
+  }
+}
+
+# Projects the world landmarks from the cropped hand image to the corresponding
+# locations on the full image before cropping (input to the graph).
+node {
+  calculator: "WorldLandmarkProjectionCalculator"
+  input_stream: "LANDMARKS:unprojected_world_landmarks"
+  input_stream: "NORM_RECT:hand_rect"
+  output_stream: "LANDMARKS:hand_world_landmarks"
+}
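Taken together, hand_landmark_cpu.pbtxt and hand_landmark_gpu.pbtxt map model-space landmarks back onto the input image in two steps: letterbox removal, then projection through the (possibly rotated) ROI. The arithmetic is roughly the following (a minimal Rust sketch under the usual MediaPipe conventions; the type and function names are illustrative, not part of this patch):

    // Undo letterbox padding: `pad` is (left, top, right, bottom), as the
    // fractions of the square model input emitted on LETTERBOX_PADDING.
    fn remove_letterbox(x: f32, y: f32, pad: (f32, f32, f32, f32)) -> (f32, f32) {
        let (l, t, r, b) = pad;
        ((x - l) / (1.0 - l - r), (y - t) / (1.0 - t - b))
    }

    // Normalized ROI rectangle, mirroring the NormalizedRect proto.
    struct NormRect {
        x_center: f32,
        y_center: f32,
        width: f32,
        height: f32,
        rotation: f32, // radians
    }

    // Project a landmark from ROI-local normalized coordinates onto the
    // full image, honoring the ROI rotation.
    fn project(x: f32, y: f32, rect: &NormRect) -> (f32, f32) {
        let (dx, dy) = (x - 0.5, y - 0.5);
        let (sin, cos) = rect.rotation.sin_cos();
        (
            rect.x_center + (dx * cos - dy * sin) * rect.width,
            rect.y_center + (dx * sin + dy * cos) * rect.height,
        )
    }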
diff --git a/mediapipe/modules/hand_landmark/hand_landmark_landmarks_to_roi.pbtxt b/mediapipe/modules/hand_landmark/hand_landmark_landmarks_to_roi.pbtxt
new file mode 100644
index 0000000..1d82d76
--- /dev/null
+++ b/mediapipe/modules/hand_landmark/hand_landmark_landmarks_to_roi.pbtxt
@@ -0,0 +1,63 @@
+# MediaPipe graph to calculate hand region of interest (ROI) from landmarks
+# detected by "HandLandmarkCpu" or "HandLandmarkGpu".
+
+type: "HandLandmarkLandmarksToRoi"
+
+# Normalized landmarks. (NormalizedLandmarkList)
+input_stream: "LANDMARKS:landmarks"
+# Image size (width & height). (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# ROI according to landmarks. (NormalizedRect)
+output_stream: "ROI:roi"
+
+# Extracts a subset of the hand landmarks that are relatively more stable across
+# frames (e.g. compared to fingertips) for computing the bounding box. The box
+# will later be expanded to contain the entire hand, which makes this approach
+# more robust to drastically changing hand sizes.
+# The landmarks extracted are: wrist, MCP/PIP of five fingers.
+node {
+  calculator: "SplitNormalizedLandmarkListCalculator"
+  input_stream: "landmarks"
+  output_stream: "partial_landmarks"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 0 end: 4 }
+      ranges: { begin: 5 end: 7 }
+      ranges: { begin: 9 end: 11 }
+      ranges: { begin: 13 end: 15 }
+      ranges: { begin: 17 end: 19 }
+      combine_outputs: true
+    }
+  }
+}
+
+# Converts the hand landmarks into a rectangle (normalized by image size)
+# that encloses the hand. The calculator uses a subset of all hand landmarks
+# extracted from SplitNormalizedLandmarkListCalculator above to
+# calculate the bounding box and the rotation of the output rectangle. Please
+# see the comments in the calculator for more detail.
+node {
+  calculator: "HandLandmarksToRectCalculator"
+  input_stream: "NORM_LANDMARKS:partial_landmarks"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "NORM_RECT:hand_rect_from_landmarks"
+}
+
+# Expands the hand rectangle so that the box contains the entire hand and is
+# big enough that it is likely to still contain the hand even with some motion
+# in the next video frame.
+node {
+  calculator: "RectTransformationCalculator"
+  input_stream: "NORM_RECT:hand_rect_from_landmarks"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "roi"
+  options: {
+    [mediapipe.RectTransformationCalculatorOptions.ext] {
+      scale_x: 2.0
+      scale_y: 2.0
+      shift_y: -0.1
+      square_long: true
+    }
+  }
+}
diff --git a/mediapipe/modules/hand_landmark/hand_landmark_lite.tflite b/mediapipe/modules/hand_landmark/hand_landmark_lite.tflite
new file mode 100755
index 0000000..0a0a2ba
Binary files /dev/null and b/mediapipe/modules/hand_landmark/hand_landmark_lite.tflite differ
diff --git a/mediapipe/modules/hand_landmark/hand_landmark_model_loader.pbtxt b/mediapipe/modules/hand_landmark/hand_landmark_model_loader.pbtxt
new file mode 100644
index 0000000..c9ecf8a
--- /dev/null
+++ b/mediapipe/modules/hand_landmark/hand_landmark_model_loader.pbtxt
@@ -0,0 +1,63 @@
+# MediaPipe graph to load a selected hand landmark TF Lite model.
+
+type: "HandLandmarkModelLoader"
+
+# Complexity of the hand landmark model: 0 or 1. Landmark accuracy as well as
+# inference latency generally go up with the model complexity. If unspecified,
+# functions as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# TF Lite model represented as a FlatBuffer.
+# (std::unique_ptr<tflite::FlatBufferModel,
+#   std::function<void(tflite::FlatBufferModel*)>>)
+output_side_packet: "MODEL:model"
+
+# Determines path to the desired hand landmark model file.
+node {
+  calculator: "SwitchContainer"
+  input_side_packet: "SELECT:model_complexity"
+  output_side_packet: "PACKET:model_path"
+  options: {
+    [mediapipe.SwitchContainerOptions.ext] {
+      select: 1
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/hand_landmark/hand_landmark_lite.tflite"
+            }
+          }
+        }
+      }
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/hand_landmark/hand_landmark_full.tflite"
+            }
+          }
+        }
+      }
+    }
+  }
+}
+
+# Loads the file in the specified path into a blob.
+node {
+  calculator: "LocalFileContentsCalculator"
+  input_side_packet: "FILE_PATH:model_path"
+  output_side_packet: "CONTENTS:model_blob"
+  options: {
+    [mediapipe.LocalFileContentsCalculatorOptions.ext]: {
+      text_mode: false
+    }
+  }
+}
+
+# Converts the input blob into a TF Lite model.
+node {
+  calculator: "TfLiteModelCalculator"
+  input_side_packet: "MODEL_BLOB:model_blob"
+  output_side_packet: "MODEL:model"
+}
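The loader above is a two-entry lookup keyed by MODEL_COMPLEXITY, falling back to the full model when the side packet is absent (select: 1). Reduced to host code, the selection amounts to something like this (illustrative Rust; the function name is made up):

    // Mirrors the SwitchContainer above: contained node 0 holds the lite
    // model path, node 1 (the default) the full model path.
    fn hand_landmark_model_path(model_complexity: Option<u32>) -> &'static str {
        match model_complexity.unwrap_or(1) {
            0 => "mediapipe/modules/hand_landmark/hand_landmark_lite.tflite",
            _ => "mediapipe/modules/hand_landmark/hand_landmark_full.tflite",
        }
    }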
diff --git a/mediapipe/modules/hand_landmark/hand_landmark_tracking_cpu.pbtxt b/mediapipe/modules/hand_landmark/hand_landmark_tracking_cpu.pbtxt
new file mode 100644
index 0000000..2ee8316
--- /dev/null
+++ b/mediapipe/modules/hand_landmark/hand_landmark_tracking_cpu.pbtxt
@@ -0,0 +1,271 @@
+# MediaPipe graph to detect/predict hand landmarks on CPU.
+#
+# The procedure is done in two steps:
+# - locate palms/hands
+# - detect landmarks for each palm/hand.
+# This graph tries to skip palm detection as much as possible by reusing
+# previously detected/predicted landmarks for new images.
+
+type: "HandLandmarkTrackingCpu"
+
+# CPU image. (ImageFrame)
+input_stream: "IMAGE:image"
+
+# Max number of hands to detect/track. (int)
+input_side_packet: "NUM_HANDS:num_hands"
+
+# Complexity of hand landmark and palm detection models: 0 or 1. Accuracy as
+# well as inference latency generally go up with the model complexity. If
+# unspecified, functions as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# Whether landmarks on the previous image should be used to help localize
+# landmarks on the current image. (bool)
+input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+
+# Collection of detected/predicted hands, each represented as a list of
+# landmarks. (std::vector<NormalizedLandmarkList>)
+# NOTE: there will not be an output packet in the LANDMARKS stream for this
+# particular timestamp if no hands are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "LANDMARKS:multi_hand_landmarks"
+
+# Collection of detected/predicted hand world landmarks.
+# (std::vector<LandmarkList>)
+#
+# World landmarks are real-world 3D coordinates in meters with the origin in the
+# center of the hand bounding box calculated from the landmarks.
+#
+# WORLD_LANDMARKS shares the same landmark topology as LANDMARKS. However,
+# LANDMARKS provides coordinates (in pixels) of a 3D object projected onto the
+# 2D image surface, while WORLD_LANDMARKS provides coordinates (in meters) of
+# the 3D object itself.
+output_stream: "WORLD_LANDMARKS:multi_hand_world_landmarks"
+
+# Collection of handedness of the detected hands (i.e. is hand left or right),
+# each represented as a ClassificationList proto with a single Classification
+# entry. (std::vector<ClassificationList>)
+# Note that handedness is determined assuming the input image is mirrored,
+# i.e., taken with a front-facing/selfie camera with images flipped
+# horizontally.
+output_stream: "HANDEDNESS:multi_handedness"
+
+# Extra outputs (for debugging, for instance).
+# Detected palms. (std::vector<Detection>)
+output_stream: "PALM_DETECTIONS:palm_detections"
+# Regions of interest calculated based on landmarks.
+# (std::vector<NormalizedRect>)
+output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects"
+# Regions of interest calculated based on palm detections.
+# (std::vector<NormalizedRect>)
+output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
+
+# When the optional input side packet "use_prev_landmarks" is either absent or
+# set to true, uses the landmarks on the previous image to help localize
+# landmarks on the current image.
+node {
+  calculator: "GateCalculator"
+  input_side_packet: "ALLOW:use_prev_landmarks"
+  input_stream: "prev_hand_rects_from_landmarks"
+  output_stream: "gated_prev_hand_rects_from_landmarks"
+  options: {
+    [mediapipe.GateCalculatorOptions.ext] {
+      allow: true
+    }
+  }
+}
+
+# Determines if an input vector of NormalizedRect has a size greater than or
+# equal to the provided num_hands.
+node {
+  calculator: "NormalizedRectVectorHasMinSizeCalculator"
+  input_stream: "ITERABLE:gated_prev_hand_rects_from_landmarks"
+  input_side_packet: "num_hands"
+  output_stream: "prev_has_enough_hands"
+}
+
+# Drops the incoming image if enough hands have already been identified from the
+# previous image. Otherwise, passes the incoming image through to trigger a new
+# round of palm detection.
+node {
+  calculator: "GateCalculator"
+  input_stream: "image"
+  input_stream: "DISALLOW:prev_has_enough_hands"
+  output_stream: "palm_detection_image"
+  options: {
+    [mediapipe.GateCalculatorOptions.ext] {
+      empty_packets_as_allow: true
+    }
+  }
+}
+
+# Detects palms.
+node { + calculator: "PalmDetectionCpu" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + input_stream: "IMAGE:palm_detection_image" + output_stream: "DETECTIONS:all_palm_detections" +} + +# Makes sure there are no more detections than the provided num_hands. +node { + calculator: "ClipDetectionVectorSizeCalculator" + input_stream: "all_palm_detections" + output_stream: "palm_detections" + input_side_packet: "num_hands" +} + +# Extracts image size. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:palm_detection_image" + output_stream: "SIZE:palm_detection_image_size" +} + +# Outputs each element of palm_detections at a fake timestamp for the rest of +# the graph to process. Clones the image size packet for each palm_detection at +# the fake timestamp. At the end of the loop, outputs the BATCH_END timestamp +# for downstream calculators to inform them that all elements in the vector have +# been processed. +node { + calculator: "BeginLoopDetectionCalculator" + input_stream: "ITERABLE:palm_detections" + input_stream: "CLONE:palm_detection_image_size" + output_stream: "ITEM:palm_detection" + output_stream: "CLONE:image_size_for_palms" + output_stream: "BATCH_END:palm_detections_timestamp" +} + +# Calculates region of interest (ROI) based on the specified palm. +node { + calculator: "PalmDetectionDetectionToRoi" + input_stream: "DETECTION:palm_detection" + input_stream: "IMAGE_SIZE:image_size_for_palms" + output_stream: "ROI:hand_rect_from_palm_detection" +} + +# Collects a NormalizedRect for each hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:hand_rect_from_palm_detection" + input_stream: "BATCH_END:palm_detections_timestamp" + output_stream: "ITERABLE:hand_rects_from_palm_detections" +} + +# Performs association between NormalizedRect vector elements from previous +# image and rects based on palm detections from the current image. This +# calculator ensures that the output hand_rects vector doesn't contain +# overlapping regions based on the specified min_similarity_threshold. +node { + calculator: "AssociationNormRectCalculator" + input_stream: "hand_rects_from_palm_detections" + input_stream: "gated_prev_hand_rects_from_landmarks" + output_stream: "hand_rects" + options: { + [mediapipe.AssociationCalculatorOptions.ext] { + min_similarity_threshold: 0.5 + } + } +} + +# Extracts image size. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_CPU:image" + output_stream: "SIZE:image_size" +} + +# Outputs each element of hand_rects at a fake timestamp for the rest of the +# graph to process. Clones image and image size packets for each +# single_hand_rect at the fake timestamp. At the end of the loop, outputs the +# BATCH_END timestamp for downstream calculators to inform them that all +# elements in the vector have been processed. +node { + calculator: "BeginLoopNormalizedRectCalculator" + input_stream: "ITERABLE:hand_rects" + input_stream: "CLONE:0:image" + input_stream: "CLONE:1:image_size" + output_stream: "ITEM:single_hand_rect" + output_stream: "CLONE:0:image_for_landmarks" + output_stream: "CLONE:1:image_size_for_landmarks" + output_stream: "BATCH_END:hand_rects_timestamp" +} + +# Detect hand landmarks for the specific hand rect. 
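+# (Runs once per element of hand_rects: the BeginLoop/EndLoop calculators
+# around it unpack the vector into per-hand packets and repack the results at
+# the BATCH_END timestamp.)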
+node { + calculator: "HandLandmarkCpu" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + input_stream: "IMAGE:image_for_landmarks" + input_stream: "ROI:single_hand_rect" + output_stream: "LANDMARKS:single_hand_landmarks" + output_stream: "WORLD_LANDMARKS:single_hand_world_landmarks" + output_stream: "HANDEDNESS:single_handedness" +} + +# Collects the handedness for each single hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs a vector of ClassificationList at the BATCH_END +# timestamp. +node { + calculator: "EndLoopClassificationListCalculator" + input_stream: "ITEM:single_handedness" + input_stream: "BATCH_END:hand_rects_timestamp" + output_stream: "ITERABLE:multi_handedness" +} + +# Calculate region of interest (ROI) based on detected hand landmarks to reuse +# on the subsequent runs of the graph. +node { + calculator: "HandLandmarkLandmarksToRoi" + input_stream: "IMAGE_SIZE:image_size_for_landmarks" + input_stream: "LANDMARKS:single_hand_landmarks" + output_stream: "ROI:single_hand_rect_from_landmarks" +} + +# Collects a set of landmarks for each hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITEM:single_hand_landmarks" + input_stream: "BATCH_END:hand_rects_timestamp" + output_stream: "ITERABLE:multi_hand_landmarks" +} + +# Collects a set of world landmarks for each hand into a vector. Upon receiving +# the BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END +# timestamp. +node { + calculator: "EndLoopLandmarkListVectorCalculator" + input_stream: "ITEM:single_hand_world_landmarks" + input_stream: "BATCH_END:hand_rects_timestamp" + output_stream: "ITERABLE:multi_hand_world_landmarks" +} + +# Collects a NormalizedRect for each hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:single_hand_rect_from_landmarks" + input_stream: "BATCH_END:hand_rects_timestamp" + output_stream: "ITERABLE:hand_rects_from_landmarks" +} + +# Caches hand rects calculated from landmarks, and upon the arrival of the next +# input image, sends out the cached rects with timestamps replaced by that of +# the input image, essentially generating a packet that carries the previous +# hand rects. Note that upon the arrival of the very first input image, a +# timestamp bound update occurs to jump start the feedback loop. +node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:image" + input_stream: "LOOP:hand_rects_from_landmarks" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:prev_hand_rects_from_landmarks" +} diff --git a/mediapipe/modules/hand_landmark/hand_landmark_tracking_cpu_image.pbtxt b/mediapipe/modules/hand_landmark/hand_landmark_tracking_cpu_image.pbtxt new file mode 100644 index 0000000..0bdabb9 --- /dev/null +++ b/mediapipe/modules/hand_landmark/hand_landmark_tracking_cpu_image.pbtxt @@ -0,0 +1,116 @@ +# MediaPipe graph to detect/predict hand landmarks on CPU. +# +# The procedure is done in two steps: +# - locate palms/hands +# - detect landmarks for each palm/hand. +# This graph tries to skip palm detection as much as possible by reusing +# previously detected/predicted landmarks for new images. + +type: "HandLandmarkTrackingCpuImage" + +# Input image. 
(Image)
+input_stream: "IMAGE:image"
+
+# Max number of hands to detect/track. (int)
+input_side_packet: "NUM_HANDS:num_hands"
+
+# Complexity of hand landmark and palm detection models: 0 or 1. Accuracy as
+# well as inference latency generally go up with the model complexity. If
+# unspecified, functions as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# Whether landmarks on the previous image should be used to help localize
+# landmarks on the current image. (bool)
+input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+
+# The throttled input image. (Image)
+output_stream: "IMAGE:throttled_image"
+
+# Collection of detected/predicted hands, each represented as a list of
+# landmarks. (std::vector<NormalizedLandmarkList>)
+# NOTE: there will not be an output packet in the LANDMARKS stream for this
+# particular timestamp if no hands are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "LANDMARKS:multi_hand_landmarks"
+
+# Collection of detected/predicted hand world landmarks.
+# (std::vector<LandmarkList>)
+#
+# World landmarks are real-world 3D coordinates in meters with the origin in the
+# center of the hand bounding box calculated from the landmarks.
+#
+# WORLD_LANDMARKS shares the same landmark topology as LANDMARKS. However,
+# LANDMARKS provides coordinates (in pixels) of a 3D object projected onto the
+# 2D image surface, while WORLD_LANDMARKS provides coordinates (in meters) of
+# the 3D object itself.
+output_stream: "WORLD_LANDMARKS:multi_hand_world_landmarks"
+
+# Collection of handedness of the detected hands (i.e. is hand left or right),
+# each represented as a ClassificationList proto with a single Classification
+# entry. (std::vector<ClassificationList>)
+# Note that handedness is determined assuming the input image is mirrored,
+# i.e., taken with a front-facing/selfie camera with images flipped
+# horizontally.
+output_stream: "HANDEDNESS:multi_handedness"
+
+# Extra outputs (for debugging, for instance).
+# Detected palms. (std::vector<Detection>)
+output_stream: "PALM_DETECTIONS:palm_detections"
+# Regions of interest calculated based on landmarks.
+# (std::vector<NormalizedRect>)
+output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects"
+# Regions of interest calculated based on palm detections.
+# (std::vector<NormalizedRect>)
+output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
+
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "image"
+  input_stream: "FINISHED:multi_hand_landmarks"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_image"
+  options: {
+    [mediapipe.FlowLimiterCalculatorOptions.ext] {
+      max_in_flight: 1
+      max_in_queue: 1
+    }
+  }
+}
+
+# Converts Image to ImageFrame for HandLandmarkTrackingCpu to consume.
+node {
+  calculator: "FromImageCalculator"
+  input_stream: "IMAGE:throttled_image"
+  output_stream: "IMAGE_CPU:raw_image_frame"
+  output_stream: "SOURCE_ON_GPU:is_gpu_image"
+}
+
+# TODO: Remove the extra flipping once adopting MlImage.
+# If the source images are on gpu, flip the data vertically before sending them
+# into HandLandmarkTrackingCpu. This may be needed because OpenGL represents
+# images assuming the image origin is at the bottom-left corner, whereas
+# MediaPipe in general assumes the image origin is at the top-left corner.
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE:raw_image_frame"
+  input_stream: "FLIP_VERTICALLY:is_gpu_image"
+  output_stream: "IMAGE:image_frame"
+}
+
+node {
+  calculator: "HandLandmarkTrackingCpu"
+  input_stream: "IMAGE:image_frame"
+  input_side_packet: "NUM_HANDS:num_hands"
+  input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+  input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+  output_stream: "LANDMARKS:multi_hand_landmarks"
+  output_stream: "WORLD_LANDMARKS:multi_hand_world_landmarks"
+  output_stream: "HANDEDNESS:multi_handedness"
+  output_stream: "PALM_DETECTIONS:palm_detections"
+  output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects"
+  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
+}
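Both *_image wrapper graphs throttle capture with a FlowLimiterCalculator (max_in_flight: 1, max_in_queue: 1) whose FINISHED back edge is the landmark output, so the camera can never outrun inference. A rough Rust model of that policy (illustrative only, not part of this patch):

    struct FlowLimiter<T> {
        in_flight: bool,
        queued: Option<T>, // newest waiting frame; older ones are dropped
    }

    impl<T> FlowLimiter<T> {
        fn new() -> Self {
            Self { in_flight: false, queued: None }
        }

        // Called for every captured frame; returns a frame to process, if any.
        fn push(&mut self, frame: T) -> Option<T> {
            if self.in_flight {
                self.queued = Some(frame); // replaces any older queued frame
                None
            } else {
                self.in_flight = true;
                Some(frame)
            }
        }

        // Called on the FINISHED back edge; may release the queued frame.
        fn finished(&mut self) -> Option<T> {
            let next = self.queued.take();
            self.in_flight = next.is_some();
            next
        }
    }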
diff --git a/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt b/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt
new file mode 100644
index 0000000..da76f4a
--- /dev/null
+++ b/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu.pbtxt
@@ -0,0 +1,272 @@
+# MediaPipe graph to detect/predict hand landmarks on GPU.
+#
+# The procedure is done in two steps:
+# - locate palms/hands
+# - detect landmarks for each palm/hand.
+# This graph tries to skip palm detection as much as possible by reusing
+# previously detected/predicted landmarks for new images.
+
+type: "HandLandmarkTrackingGpu"
+
+# GPU image. (GpuBuffer)
+input_stream: "IMAGE:image"
+
+# Max number of hands to detect/track. (int)
+input_side_packet: "NUM_HANDS:num_hands"
+
+# Complexity of hand landmark and palm detection models: 0 or 1. Accuracy as
+# well as inference latency generally go up with the model complexity. If
+# unspecified, functions as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# Whether landmarks on the previous image should be used to help localize
+# landmarks on the current image. (bool)
+input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+
+# Collection of detected/predicted hands, each represented as a list of
+# landmarks. (std::vector<NormalizedLandmarkList>)
+# NOTE: there will not be an output packet in the LANDMARKS stream for this
+# particular timestamp if no hands are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "LANDMARKS:multi_hand_landmarks"
+
+# Collection of detected/predicted hand world landmarks.
+# (std::vector<LandmarkList>)
+#
+# World landmarks are real-world 3D coordinates in meters with the origin in the
+# center of the hand bounding box calculated from the landmarks.
+#
+# WORLD_LANDMARKS shares the same landmark topology as LANDMARKS. However,
+# LANDMARKS provides coordinates (in pixels) of a 3D object projected onto the
+# 2D image surface, while WORLD_LANDMARKS provides coordinates (in meters) of
+# the 3D object itself.
+output_stream: "WORLD_LANDMARKS:multi_hand_world_landmarks"
+
+# Collection of handedness of the detected hands (i.e. is hand left or right),
+# each represented as a ClassificationList proto with a single Classification
+# entry. (std::vector<ClassificationList>)
+# Note that handedness is determined assuming the input image is mirrored,
+# i.e., taken with a front-facing/selfie camera with images flipped
+# horizontally.
+output_stream: "HANDEDNESS:multi_handedness"
+
+# Extra outputs (for debugging, for instance).
+# Detected palms. (std::vector<Detection>)
+output_stream: "PALM_DETECTIONS:palm_detections"
+# Regions of interest calculated based on landmarks.
+# (std::vector<NormalizedRect>)
+output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects"
+# Regions of interest calculated based on palm detections.
+# (std::vector<NormalizedRect>)
+output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
+
+# When the optional input side packet "use_prev_landmarks" is either absent or
+# set to true, uses the landmarks on the previous image to help localize
+# landmarks on the current image.
+node {
+  calculator: "GateCalculator"
+  input_side_packet: "ALLOW:use_prev_landmarks"
+  input_stream: "prev_hand_rects_from_landmarks"
+  output_stream: "gated_prev_hand_rects_from_landmarks"
+  options: {
+    [mediapipe.GateCalculatorOptions.ext] {
+      allow: true
+    }
+  }
+}
+
+# Determines if an input vector of NormalizedRect has a size greater than or
+# equal to the provided num_hands.
+node {
+  calculator: "NormalizedRectVectorHasMinSizeCalculator"
+  input_stream: "ITERABLE:gated_prev_hand_rects_from_landmarks"
+  input_side_packet: "num_hands"
+  output_stream: "prev_has_enough_hands"
+}
+
+# Drops the incoming image if enough hands have already been identified from the
+# previous image. Otherwise, passes the incoming image through to trigger a new
+# round of palm detection.
+node {
+  calculator: "GateCalculator"
+  input_stream: "image"
+  input_stream: "DISALLOW:prev_has_enough_hands"
+  output_stream: "palm_detection_image"
+  options: {
+    [mediapipe.GateCalculatorOptions.ext] {
+      empty_packets_as_allow: true
+    }
+  }
+}
+
+# Detects palms.
+node {
+  calculator: "PalmDetectionGpu"
+  input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+  input_stream: "IMAGE:palm_detection_image"
+  output_stream: "DETECTIONS:all_palm_detections"
+}
+
+# Makes sure there are no more detections than the provided num_hands.
+node {
+  calculator: "ClipDetectionVectorSizeCalculator"
+  input_stream: "all_palm_detections"
+  output_stream: "palm_detections"
+  input_side_packet: "num_hands"
+}
+
+# Extracts image size.
+node {
+  calculator: "ImagePropertiesCalculator"
+  input_stream: "IMAGE_GPU:palm_detection_image"
+  output_stream: "SIZE:palm_detection_image_size"
+}
+
+# Outputs each element of palm_detections at a fake timestamp for the rest of
+# the graph to process. Clones the image_size packet for each palm_detection at
+# the fake timestamp. At the end of the loop, outputs the BATCH_END timestamp
+# for downstream calculators to inform them that all elements in the vector have
+# been processed.
+node {
+  calculator: "BeginLoopDetectionCalculator"
+  input_stream: "ITERABLE:palm_detections"
+  input_stream: "CLONE:palm_detection_image_size"
+  output_stream: "ITEM:palm_detection"
+  output_stream: "CLONE:image_size_for_palms"
+  output_stream: "BATCH_END:palm_detections_timestamp"
+}
+
+# Calculates region of interest (ROI) based on the specified palm.
+node {
+  calculator: "PalmDetectionDetectionToRoi"
+  input_stream: "DETECTION:palm_detection"
+  input_stream: "IMAGE_SIZE:image_size_for_palms"
+  output_stream: "ROI:hand_rect_from_palm_detection"
+}
+
+# Collects a NormalizedRect for each hand into a vector. Upon receiving the
+# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END
+# timestamp.
+node { + name: "EndLoopForPalmDetections" + calculator: "EndLoopNormalizedRectCalculator" + input_stream: "ITEM:hand_rect_from_palm_detection" + input_stream: "BATCH_END:palm_detections_timestamp" + output_stream: "ITERABLE:hand_rects_from_palm_detections" +} + +# Performs association between NormalizedRect vector elements from previous +# image and rects based on palm detections from the current image. This +# calculator ensures that the output hand_rects vector doesn't contain +# overlapping regions based on the specified min_similarity_threshold. +node { + calculator: "AssociationNormRectCalculator" + input_stream: "hand_rects_from_palm_detections" + input_stream: "gated_prev_hand_rects_from_landmarks" + output_stream: "hand_rects" + options: { + [mediapipe.AssociationCalculatorOptions.ext] { + min_similarity_threshold: 0.5 + } + } +} + +# Extracts image size. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:image" + output_stream: "SIZE:image_size" +} + +# Outputs each element of hand_rects at a fake timestamp for the rest of the +# graph to process. Clones image and image size packets for each +# single_hand_rect at the fake timestamp. At the end of the loop, outputs the +# BATCH_END timestamp for downstream calculators to inform them that all +# elements in the vector have been processed. +node { + calculator: "BeginLoopNormalizedRectCalculator" + input_stream: "ITERABLE:hand_rects" + input_stream: "CLONE:0:image" + input_stream: "CLONE:1:image_size" + output_stream: "ITEM:single_hand_rect" + output_stream: "CLONE:0:image_for_landmarks" + output_stream: "CLONE:1:image_size_for_landmarks" + output_stream: "BATCH_END:hand_rects_timestamp" +} + +# Detect hand landmarks for the specific hand rect. +node { + calculator: "HandLandmarkGpu" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + input_stream: "IMAGE:image_for_landmarks" + input_stream: "ROI:single_hand_rect" + output_stream: "LANDMARKS:single_hand_landmarks" + output_stream: "WORLD_LANDMARKS:single_hand_world_landmarks" + output_stream: "HANDEDNESS:single_handedness" +} + +# Collects the handedness for each single hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs a vector of ClassificationList at the BATCH_END +# timestamp. +node { + calculator: "EndLoopClassificationListCalculator" + input_stream: "ITEM:single_handedness" + input_stream: "BATCH_END:hand_rects_timestamp" + output_stream: "ITERABLE:multi_handedness" +} + +# Calculate region of interest (ROI) based on detected hand landmarks to reuse +# on the subsequent runs of the graph. +node { + calculator: "HandLandmarkLandmarksToRoi" + input_stream: "IMAGE_SIZE:image_size_for_landmarks" + input_stream: "LANDMARKS:single_hand_landmarks" + output_stream: "ROI:single_hand_rect_from_landmarks" +} + +# Collects a set of landmarks for each hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITEM:single_hand_landmarks" + input_stream: "BATCH_END:hand_rects_timestamp" + output_stream: "ITERABLE:multi_hand_landmarks" +} + +# Collects a set of world landmarks for each hand into a vector. Upon receiving +# the BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END +# timestamp. 
+node {
+  calculator: "EndLoopLandmarkListVectorCalculator"
+  input_stream: "ITEM:single_hand_world_landmarks"
+  input_stream: "BATCH_END:hand_rects_timestamp"
+  output_stream: "ITERABLE:multi_hand_world_landmarks"
+}
+
+# Collects a NormalizedRect for each hand into a vector. Upon receiving the
+# BATCH_END timestamp, outputs the vector of NormalizedRect at the BATCH_END
+# timestamp.
+node {
+  calculator: "EndLoopNormalizedRectCalculator"
+  input_stream: "ITEM:single_hand_rect_from_landmarks"
+  input_stream: "BATCH_END:hand_rects_timestamp"
+  output_stream: "ITERABLE:hand_rects_from_landmarks"
+}
+
+# Caches hand rects calculated from landmarks, and upon the arrival of the next
+# input image, sends out the cached rects with timestamps replaced by that of
+# the input image, essentially generating a packet that carries the previous
+# hand rects. Note that upon the arrival of the very first input image, a
+# timestamp bound update occurs to jump start the feedback loop.
+node {
+  calculator: "PreviousLoopbackCalculator"
+  input_stream: "MAIN:image"
+  input_stream: "LOOP:hand_rects_from_landmarks"
+  input_stream_info: {
+    tag_index: "LOOP"
+    back_edge: true
+  }
+  output_stream: "PREV_LOOP:prev_hand_rects_from_landmarks"
+}
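The PreviousLoopbackCalculator that closes both tracking graphs is what turns one-shot detection into tracking: for each new image it replays the hand rects computed for the previous image, which the gate at the top of the graph uses to decide whether palm detection can be skipped. A rough Rust model of the contract (illustrative only, not part of this patch):

    struct PreviousLoopback<R> {
        prev: Option<R>, // LOOP packet cached from the previous round
    }

    impl<R> PreviousLoopback<R> {
        fn new() -> Self {
            Self { prev: None }
        }

        // Called once per MAIN packet (input image) with the LOOP packet
        // produced at the previous timestamp; returns what PREV_LOOP emits
        // now (None models the empty timestamp bound for the first image).
        fn tick(&mut self, loop_packet: Option<R>) -> Option<R> {
            std::mem::replace(&mut self.prev, loop_packet)
        }
    }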
diff --git a/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu_image.pbtxt b/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu_image.pbtxt
new file mode 100644
index 0000000..8b8e466
--- /dev/null
+++ b/mediapipe/modules/hand_landmark/hand_landmark_tracking_gpu_image.pbtxt
@@ -0,0 +1,115 @@
+# MediaPipe graph to detect/predict hand landmarks on GPU.
+#
+# The procedure is done in two steps:
+# - locate palms/hands
+# - detect landmarks for each palm/hand.
+# This graph tries to skip palm detection as much as possible by reusing
+# previously detected/predicted landmarks for new images.
+
+type: "HandLandmarkTrackingGpuImage"
+
+# Input image. (Image)
+input_stream: "IMAGE:image"
+
+# Max number of hands to detect/track. (int)
+input_side_packet: "NUM_HANDS:num_hands"
+
+# Complexity of hand landmark and palm detection models: 0 or 1. Accuracy as
+# well as inference latency generally go up with the model complexity. If
+# unspecified, functions as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# Whether landmarks on the previous image should be used to help localize
+# landmarks on the current image. (bool)
+input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+
+# Collection of detected/predicted hands, each represented as a list of
+# landmarks. (std::vector<NormalizedLandmarkList>)
+# NOTE: there will not be an output packet in the LANDMARKS stream for this
+# particular timestamp if no hands are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "LANDMARKS:multi_hand_landmarks"
+
+# Collection of detected/predicted hand world landmarks.
+# (std::vector<LandmarkList>)
+#
+# World landmarks are real-world 3D coordinates in meters with the origin in the
+# center of the hand bounding box calculated from the landmarks.
+#
+# WORLD_LANDMARKS shares the same landmark topology as LANDMARKS. However,
+# LANDMARKS provides coordinates (in pixels) of a 3D object projected onto the
+# 2D image surface, while WORLD_LANDMARKS provides coordinates (in meters) of
+# the 3D object itself.
+output_stream: "WORLD_LANDMARKS:multi_hand_world_landmarks"
+
+# Collection of handedness of the detected hands (i.e. is hand left or right),
+# each represented as a ClassificationList proto with a single Classification
+# entry. (std::vector<ClassificationList>)
+# Note that handedness is determined assuming the input image is mirrored,
+# i.e., taken with a front-facing/selfie camera with images flipped
+# horizontally.
+output_stream: "HANDEDNESS:multi_handedness"
+
+# The throttled input image. (Image)
+output_stream: "IMAGE:throttled_image"
+# Extra outputs (for debugging, for instance).
+# Detected palms. (std::vector<Detection>)
+output_stream: "PALM_DETECTIONS:palm_detections"
+# Regions of interest calculated based on landmarks.
+# (std::vector<NormalizedRect>)
+output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects"
+# Regions of interest calculated based on palm detections.
+# (std::vector<NormalizedRect>)
+output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
+
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "image"
+  input_stream: "FINISHED:multi_hand_landmarks"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_image"
+  options: {
+    [mediapipe.FlowLimiterCalculatorOptions.ext] {
+      max_in_flight: 1
+      max_in_queue: 1
+    }
+  }
+}
+
+# Converts Image to GpuBuffer for HandLandmarkTrackingGpu to consume.
+node {
+  calculator: "FromImageCalculator"
+  input_stream: "IMAGE:throttled_image"
+  output_stream: "IMAGE_GPU:raw_gpu_buffer"
+  output_stream: "SOURCE_ON_GPU:is_gpu_image"
+}
+
+# TODO: Remove the extra flipping once adopting MlImage.
+# If the source images are on gpu, flip the data vertically before sending them
+# into HandLandmarkTrackingGpu. This may be needed because OpenGL represents
+# images assuming the image origin is at the bottom-left corner, whereas
+# MediaPipe in general assumes the image origin is at the top-left corner.
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE_GPU:raw_gpu_buffer"
+  input_stream: "FLIP_VERTICALLY:is_gpu_image"
+  output_stream: "IMAGE_GPU:gpu_buffer"
+}
+
+node {
+  calculator: "HandLandmarkTrackingGpu"
+  input_stream: "IMAGE:gpu_buffer"
+  input_side_packet: "NUM_HANDS:num_hands"
+  input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+  input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+  output_stream: "LANDMARKS:multi_hand_landmarks"
+  output_stream: "WORLD_LANDMARKS:multi_hand_world_landmarks"
+  output_stream: "HANDEDNESS:multi_handedness"
+  output_stream: "PALM_DETECTIONS:palm_detections"
+  output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects"
+  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
+}
diff --git a/mediapipe/modules/hand_landmark/handedness.txt b/mediapipe/modules/hand_landmark/handedness.txt
new file mode 100644
index 0000000..9f636db
--- /dev/null
+++ b/mediapipe/modules/hand_landmark/handedness.txt
@@ -0,0 +1,2 @@
+Left
+Right
diff --git a/mediapipe/modules/hand_landmark/palm_detection_detection_to_roi.pbtxt b/mediapipe/modules/hand_landmark/palm_detection_detection_to_roi.pbtxt
new file mode 100644
index 0000000..838633b
--- /dev/null
+++ b/mediapipe/modules/hand_landmark/palm_detection_detection_to_roi.pbtxt
@@ -0,0 +1,47 @@
+# MediaPipe subgraph that calculates hand ROI from palm detection.
+
+type: "PalmDetectionDetectionToRoi"
+
+# Palm detection. (Detection)
+input_stream: "DETECTION:detection"
+# Frame size. (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# ROI (region of interest) according to landmarks, represented as normalized
+# rect.
(NormalizedRect) +output_stream: "ROI:roi" + +# Converts results of palm detection into a rectangle (normalized by image size) +# that encloses the palm and is rotated such that the line connecting center of +# the wrist and MCP of the middle finger is aligned with the Y-axis of the +# rectangle. +node { + calculator: "DetectionsToRectsCalculator" + input_stream: "DETECTION:detection" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "NORM_RECT:raw_roi" + options: { + [mediapipe.DetectionsToRectsCalculatorOptions.ext] { + rotation_vector_start_keypoint_index: 0 # Center of wrist. + rotation_vector_end_keypoint_index: 2 # MCP of middle finger. + rotation_vector_target_angle_degrees: 90 + } + } +} + +# Expands and shifts the rectangle that contains the palm so that it's likely +# to cover the entire hand. +node { + calculator: "RectTransformationCalculator" + input_stream: "NORM_RECT:raw_roi" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "roi" + options: { + [mediapipe.RectTransformationCalculatorOptions.ext] { + scale_x: 2.6 + scale_y: 2.6 + shift_y: -0.5 + square_long: true + } + } +} diff --git a/mediapipe/modules/holistic_landmark/BUILD b/mediapipe/modules/holistic_landmark/BUILD new file mode 100644 index 0000000..44854c0 --- /dev/null +++ b/mediapipe/modules/holistic_landmark/BUILD @@ -0,0 +1,267 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph") + +# TODO: revert to private. 
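To make the rotation configured in the palm_detection_detection_to_roi graph above concrete: DetectionsToRectsCalculator rotates the rect so that the start-to-end keypoint vector lines up with the configured target angle. A standalone sketch of that angle computation (plain C++, an illustration rather than the calculator's actual API):

#include <cmath>

// Angle to rotate the ROI by, given two keypoints in pixels. With
// target_angle = 90 degrees the wrist-to-middle-MCP line ends up on the
// rect's Y-axis. Y grows downward in image space, hence -(dy).
float RoiRotation(float x_start, float y_start, float x_end, float y_end) {
  const float target_angle = 0.5f * static_cast<float>(M_PI);  // 90 degrees.
  float rotation =
      target_angle - std::atan2(-(y_end - y_start), x_end - x_start);
  // Normalize to (-pi, pi].
  while (rotation > M_PI) rotation -= 2.f * static_cast<float>(M_PI);
  while (rotation <= -M_PI) rotation += 2.f * static_cast<float>(M_PI);
  return rotation;
}

RectTransformationCalculator then scales the rotated rect by 2.6x and shifts it by -0.5 of its height along the rect's own Y-axis, so the final square covers the whole hand rather than just the palm.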
+package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) + +exports_files([ + "hand_recrop.tflite", +]) + +mediapipe_simple_subgraph( + name = "face_landmarks_from_pose_gpu", + graph = "face_landmarks_from_pose_gpu.pbtxt", + register_as = "FaceLandmarksFromPoseGpu", + deps = [ + ":face_detection_front_detections_to_roi", + ":face_landmarks_from_pose_to_recrop_roi", + ":face_tracking", + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/modules/face_detection:face_detection_short_range_by_roi_gpu", + "//mediapipe/modules/face_landmark:face_landmark_gpu", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmarks_from_pose_cpu", + graph = "face_landmarks_from_pose_cpu.pbtxt", + register_as = "FaceLandmarksFromPoseCpu", + deps = [ + ":face_detection_front_detections_to_roi", + ":face_landmarks_from_pose_to_recrop_roi", + ":face_tracking", + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/modules/face_detection:face_detection_short_range_by_roi_cpu", + "//mediapipe/modules/face_landmark:face_landmark_cpu", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmarks_to_roi", + graph = "face_landmarks_to_roi.pbtxt", + register_as = "FaceLandmarksToRoi", + deps = [ + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_detection_front_detections_to_roi", + graph = "face_detection_front_detections_to_roi.pbtxt", + register_as = "FaceDetectionFrontDetectionsToRoi", + deps = [ + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_tracking", + graph = "face_tracking.pbtxt", + register_as = "FaceTracking", + deps = [ + ":face_landmarks_to_roi", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/modules/holistic_landmark/calculators:roi_tracking_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "face_landmarks_from_pose_to_recrop_roi", + graph = "face_landmarks_from_pose_to_recrop_roi.pbtxt", + register_as = "FaceLandmarksFromPoseToRecropRoi", + deps = [ + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmarks_from_pose_gpu", + graph = "hand_landmarks_from_pose_gpu.pbtxt", + register_as = "HandLandmarksFromPoseGpu", + deps = [ + ":hand_landmarks_from_pose_to_recrop_roi", + ":hand_recrop_by_roi_gpu", + ":hand_tracking", + ":hand_visibility_from_hand_landmarks_from_pose", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/modules/hand_landmark:hand_landmark_gpu", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmarks_from_pose_cpu", + graph = "hand_landmarks_from_pose_cpu.pbtxt", + register_as = "HandLandmarksFromPoseCpu", + deps = [ + ":hand_landmarks_from_pose_to_recrop_roi", + ":hand_recrop_by_roi_cpu", + ":hand_tracking", + ":hand_visibility_from_hand_landmarks_from_pose", + "//mediapipe/calculators/core:gate_calculator", + 
"//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/modules/hand_landmark:hand_landmark_cpu", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmarks_to_roi", + graph = "hand_landmarks_to_roi.pbtxt", + register_as = "HandLandmarksToRoi", + deps = [ + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + "//mediapipe/modules/hand_landmark/calculators:hand_landmarks_to_rect_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_recrop_by_roi_gpu", + graph = "hand_recrop_by_roi_gpu.pbtxt", + register_as = "HandRecropByRoiGpu", + deps = [ + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/util:alignment_points_to_rects_calculator", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_recrop_by_roi_cpu", + graph = "hand_recrop_by_roi_cpu.pbtxt", + register_as = "HandRecropByRoiCpu", + deps = [ + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/util:alignment_points_to_rects_calculator", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_tracking", + graph = "hand_tracking.pbtxt", + register_as = "HandTracking", + deps = [ + ":hand_landmarks_to_roi", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/modules/holistic_landmark/calculators:roi_tracking_calculator", + ], +) + +# TODO: parametrize holistic_landmark graph with visibility and make private. 
+mediapipe_simple_subgraph( + name = "hand_wrist_for_pose", + graph = "hand_wrist_for_pose.pbtxt", + register_as = "HandWristForPose", + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:side_packet_to_stream_calculator", + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/util:set_landmark_visibility_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmarks_left_and_right_gpu", + graph = "hand_landmarks_left_and_right_gpu.pbtxt", + register_as = "HandLandmarksLeftAndRightGpu", + deps = [ + ":hand_landmarks_from_pose_gpu", + "//mediapipe/calculators/core:split_landmarks_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmarks_left_and_right_cpu", + graph = "hand_landmarks_left_and_right_cpu.pbtxt", + register_as = "HandLandmarksLeftAndRightCpu", + deps = [ + ":hand_landmarks_from_pose_cpu", + "//mediapipe/calculators/core:split_landmarks_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_landmarks_from_pose_to_recrop_roi", + graph = "hand_landmarks_from_pose_to_recrop_roi.pbtxt", + register_as = "HandLandmarksFromPoseToRecropRoi", + deps = [ + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + "//mediapipe/modules/holistic_landmark/calculators:hand_detections_from_pose_to_rects_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "hand_visibility_from_hand_landmarks_from_pose", + graph = "hand_visibility_from_hand_landmarks_from_pose.pbtxt", + register_as = "HandVisibilityFromHandLandmarksFromPose", + deps = [ + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/util:landmark_visibility_calculator", + "//mediapipe/calculators/util:thresholding_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "holistic_landmark_gpu", + graph = "holistic_landmark_gpu.pbtxt", + register_as = "HolisticLandmarkGpu", + visibility = ["//visibility:public"], + deps = [ + ":face_landmarks_from_pose_gpu", + ":hand_landmarks_left_and_right_gpu", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/modules/pose_landmark:pose_landmark_gpu", + ], +) + +mediapipe_simple_subgraph( + name = "holistic_landmark_cpu", + graph = "holistic_landmark_cpu.pbtxt", + register_as = "HolisticLandmarkCpu", + visibility = ["//visibility:public"], + deps = [ + ":face_landmarks_from_pose_cpu", + ":hand_landmarks_left_and_right_cpu", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/modules/pose_landmark:pose_landmark_cpu", + ], +) diff --git a/mediapipe/modules/holistic_landmark/README.md b/mediapipe/modules/holistic_landmark/README.md new file mode 100644 index 0000000..d285f15 --- /dev/null +++ b/mediapipe/modules/holistic_landmark/README.md @@ -0,0 +1,6 @@ +# holistic_landmark + +Subgraphs|Details +:--- | :--- +[`HolisticLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_cpu.pbtxt)| Predicts pose + left/right hand + face landmarks. (CPU input) +[`HolisticLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt)| Predicts pose + left/right hand + face landmarks. (GPU input.) 
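The calculators package introduced below contains the C++ that turns hand-related pose keypoints into a crop rectangle. A minimal standalone sketch of that geometry, with hypothetical helper names (the real logic follows in hand_detections_from_pose_to_rects_calculator.cc):

#include <cmath>
#include <utility>

// Given wrist, index and pinky keypoints (normalized coordinates), estimate
// the middle-finger MCP, center a square crop on it, and size the crop at
// twice the wrist-to-middle distance.
struct Crop { float cx, cy, size; };  // In pixels.

Crop HandCropFromPose(std::pair<float, float> wrist,
                      std::pair<float, float> index,
                      std::pair<float, float> pinky,
                      int width, int height) {
  const float xw = wrist.first * width, yw = wrist.second * height;
  const float xi = index.first * width, yi = index.second * height;
  const float xp = pinky.first * width, yp = pinky.second * height;
  // Middle finger is estimated as a 2:1 blend of index and pinky.
  const float xm = (2.f * xi + xp) / 3.f;
  const float ym = (2.f * yi + yp) / 3.f;
  const float size =
      2.f * std::sqrt((xm - xw) * (xm - xw) + (ym - yw) * (ym - yw));
  return {xm, ym, size};
}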
diff --git a/mediapipe/modules/holistic_landmark/calculators/BUILD b/mediapipe/modules/holistic_landmark/calculators/BUILD
new file mode 100644
index 0000000..c3c0919
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/calculators/BUILD
@@ -0,0 +1,63 @@
+# Copyright 2020 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
+
+licenses(["notice"])
+
+package(default_visibility = ["//visibility:public"])
+
+cc_library(
+    name = "hand_detections_from_pose_to_rects_calculator",
+    srcs = ["hand_detections_from_pose_to_rects_calculator.cc"],
+    visibility = ["//visibility:public"],
+    deps = [
+        "//mediapipe/calculators/util:detections_to_rects_calculator",
+        "//mediapipe/calculators/util:detections_to_rects_calculator_cc_proto",
+        "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework:calculator_options_cc_proto",
+        "//mediapipe/framework/formats:detection_cc_proto",
+        "//mediapipe/framework/formats:location_data_cc_proto",
+        "//mediapipe/framework/formats:rect_cc_proto",
+        "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
+    ],
+    alwayslink = 1,
+)
+
+mediapipe_proto_library(
+    name = "roi_tracking_calculator_proto",
+    srcs = ["roi_tracking_calculator.proto"],
+    visibility = ["//visibility:public"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
+)
+
+cc_library(
+    name = "roi_tracking_calculator",
+    srcs = ["roi_tracking_calculator.cc"],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":roi_tracking_calculator_cc_proto",
+        "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/formats:landmark_cc_proto",
+        "//mediapipe/framework/formats:rect_cc_proto",
+        "//mediapipe/framework/port:logging",
+        "//mediapipe/framework/port:rectangle",
+        "@com_google_absl//absl/strings:str_format",
+    ],
+    alwayslink = 1,
+)
diff --git a/mediapipe/modules/holistic_landmark/calculators/hand_detections_from_pose_to_rects_calculator.cc b/mediapipe/modules/holistic_landmark/calculators/hand_detections_from_pose_to_rects_calculator.cc
new file mode 100644
index 0000000..5afdb8a
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/calculators/hand_detections_from_pose_to_rects_calculator.cc
@@ -0,0 +1,156 @@
+#include <cmath>
+
+#include "mediapipe/calculators/util/detections_to_rects_calculator.h"
+#include "mediapipe/calculators/util/detections_to_rects_calculator.pb.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/calculator_options.pb.h"
+#include "mediapipe/framework/formats/detection.pb.h"
+#include "mediapipe/framework/formats/location_data.pb.h"
+#include "mediapipe/framework/formats/rect.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+
+namespace mediapipe {
+
+namespace {}  // namespace
+
+// Generates a hand ROI based on a hand detection derived from hand-related pose
+// landmarks.
+//
+// Inputs:
+//   DETECTION - Detection.
+//     Detection to convert to ROI. Must contain 3 key points indicating:
+//     wrist, pinky and index fingers.
+//
+//   IMAGE_SIZE - std::pair<int, int>
+//     Image width and height.
+//
+// Outputs:
+//   NORM_RECT - NormalizedRect.
+//     ROI based on passed input.
+//
+// Example:
+// node {
+//   calculator: "HandDetectionsFromPoseToRectsCalculator"
+//   input_stream: "DETECTION:hand_detection_from_pose"
+//   input_stream: "IMAGE_SIZE:image_size"
+//   output_stream: "NORM_RECT:hand_roi_from_pose"
+// }
+class HandDetectionsFromPoseToRectsCalculator
+    : public DetectionsToRectsCalculator {
+ public:
+  absl::Status Open(CalculatorContext* cc) override;
+
+ private:
+  ::absl::Status DetectionToNormalizedRect(const Detection& detection,
+                                           const DetectionSpec& detection_spec,
+                                           NormalizedRect* rect) override;
+  absl::Status ComputeRotation(const Detection& detection,
+                               const DetectionSpec& detection_spec,
+                               float* rotation) override;
+};
+REGISTER_CALCULATOR(HandDetectionsFromPoseToRectsCalculator);
+
+namespace {
+
+constexpr int kWrist = 0;
+constexpr int kPinky = 1;
+constexpr int kIndex = 2;
+
+constexpr char kImageSizeTag[] = "IMAGE_SIZE";
+
+}  // namespace
+
+::absl::Status HandDetectionsFromPoseToRectsCalculator::Open(
+    CalculatorContext* cc) {
+  RET_CHECK(cc->Inputs().HasTag(kImageSizeTag))
+      << "Image size is required to calculate rotated rect.";
+  cc->SetOffset(TimestampDiff(0));
+  target_angle_ = M_PI * 0.5f;
+  rotate_ = true;
+  options_ = cc->Options<DetectionsToRectsCalculatorOptions>();
+  output_zero_rect_for_empty_detections_ =
+      options_.output_zero_rect_for_empty_detections();
+
+  return ::absl::OkStatus();
+}
+
+::absl::Status
+HandDetectionsFromPoseToRectsCalculator::DetectionToNormalizedRect(
+    const Detection& detection, const DetectionSpec& detection_spec,
+    NormalizedRect* rect) {
+  const auto& location_data = detection.location_data();
+  const auto& image_size = detection_spec.image_size;
+  RET_CHECK(image_size) << "Image size is required to calculate rotation";
+
+  const float x_wrist =
+      location_data.relative_keypoints(kWrist).x() * image_size->first;
+  const float y_wrist =
+      location_data.relative_keypoints(kWrist).y() * image_size->second;
+
+  const float x_index =
+      location_data.relative_keypoints(kIndex).x() * image_size->first;
+  const float y_index =
+      location_data.relative_keypoints(kIndex).y() * image_size->second;
+
+  const float x_pinky =
+      location_data.relative_keypoints(kPinky).x() * image_size->first;
+  const float y_pinky =
+      location_data.relative_keypoints(kPinky).y() * image_size->second;
+
+  // Estimate middle finger.
+  const float x_middle = (2.f * x_index + x_pinky) / 3.f;
+  const float y_middle = (2.f * y_index + y_pinky) / 3.f;
+
+  // Crop center as middle finger.
+  const float center_x = x_middle;
+  const float center_y = y_middle;
+
+  // Bounding box size as double distance from middle finger to wrist.
+  const float box_size =
+      std::sqrt((x_middle - x_wrist) * (x_middle - x_wrist) +
+                (y_middle - y_wrist) * (y_middle - y_wrist)) *
+      2.0;
+
+  // Set resulting bounding box.
+  rect->set_x_center(center_x / image_size->first);
+  rect->set_y_center(center_y / image_size->second);
+  rect->set_width(box_size / image_size->first);
+  rect->set_height(box_size / image_size->second);
+
+  return ::absl::OkStatus();
+}
+
+absl::Status HandDetectionsFromPoseToRectsCalculator::ComputeRotation(
+    const Detection& detection, const DetectionSpec& detection_spec,
+    float* rotation) {
+  const auto& location_data = detection.location_data();
+  const auto& image_size = detection_spec.image_size;
+  RET_CHECK(image_size) << "Image size is required to calculate rotation";
+
+  const float x_wrist =
+      location_data.relative_keypoints(kWrist).x() * image_size->first;
+  const float y_wrist =
+      location_data.relative_keypoints(kWrist).y() * image_size->second;
+
+  const float x_index =
+      location_data.relative_keypoints(kIndex).x() * image_size->first;
+  const float y_index =
+      location_data.relative_keypoints(kIndex).y() * image_size->second;
+
+  const float x_pinky =
+      location_data.relative_keypoints(kPinky).x() * image_size->first;
+  const float y_pinky =
+      location_data.relative_keypoints(kPinky).y() * image_size->second;
+
+  // Estimate middle finger.
+  const float x_middle = (2.f * x_index + x_pinky) / 3.f;
+  const float y_middle = (2.f * y_index + y_pinky) / 3.f;
+
+  *rotation = NormalizeRadians(
+      target_angle_ - std::atan2(-(y_middle - y_wrist), x_middle - x_wrist));
+
+  return ::absl::OkStatus();
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/modules/holistic_landmark/calculators/roi_tracking_calculator.cc b/mediapipe/modules/holistic_landmark/calculators/roi_tracking_calculator.cc
new file mode 100644
index 0000000..0da6cd7
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/calculators/roi_tracking_calculator.cc
@@ -0,0 +1,358 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <cmath>
+
+#include <utility>
+
+#include "absl/strings/str_format.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/formats/rect.pb.h"
+#include "mediapipe/framework/port/logging.h"
+#include "mediapipe/framework/port/rectangle.h"
+#include "mediapipe/modules/holistic_landmark/calculators/roi_tracking_calculator.pb.h"
+
+namespace mediapipe {
+
+namespace {
+
+constexpr char kPrevLandmarksTag[] = "PREV_LANDMARKS";
+constexpr char kPrevLandmarksRectTag[] = "PREV_LANDMARKS_RECT";
+constexpr char kRecropRectTag[] = "RECROP_RECT";
+constexpr char kImageSizeTag[] = "IMAGE_SIZE";
+constexpr char kTrackingRectTag[] = "TRACKING_RECT";
+
+// TODO: Use rect rotation.
+// Verifies that the Intersection over Union of the previous frame rect and
+// the current frame re-crop rect is at least the given threshold (tracking is
+// considered lost when IoU falls below it).
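// Illustrative only (not used by the calculator): IoU of two axis-aligned
// boxes given as {xmin, ymin, xmax, ymax}. IouRequirementsSatisfied below
// performs the equivalent computation via Rectangle_f, on rects scaled from
// normalized to pixel coordinates.
inline float ExampleIou(const float a[4], const float b[4]) {
  const float ix0 = a[0] > b[0] ? a[0] : b[0];
  const float iy0 = a[1] > b[1] ? a[1] : b[1];
  const float ix1 = a[2] < b[2] ? a[2] : b[2];
  const float iy1 = a[3] < b[3] ? a[3] : b[3];
  const float iw = ix1 > ix0 ? ix1 - ix0 : 0.f;
  const float ih = iy1 > iy0 ? iy1 - iy0 : 0.f;
  const float intersection = iw * ih;
  const float union_area = (a[2] - a[0]) * (a[3] - a[1]) +
                           (b[2] - b[0]) * (b[3] - b[1]) - intersection;
  return union_area > 0.f ? intersection / union_area : 0.f;
}
// Tracking is kept only when IoU >= min_iou (default 0.5 in the proto below).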
+bool IouRequirementsSatisfied(const NormalizedRect& prev_rect,
+                              const NormalizedRect& recrop_rect,
+                              const std::pair<int, int>& image_size,
+                              const float min_iou) {
+  auto r1 = Rectangle_f(prev_rect.x_center() * image_size.first,
+                        prev_rect.y_center() * image_size.second,
+                        prev_rect.width() * image_size.first,
+                        prev_rect.height() * image_size.second);
+  auto r2 = Rectangle_f(recrop_rect.x_center() * image_size.first,
+                        recrop_rect.y_center() * image_size.second,
+                        recrop_rect.width() * image_size.first,
+                        recrop_rect.height() * image_size.second);
+
+  const float intersection_area = r1.Intersect(r2).Area();
+  const float union_area = r1.Area() + r2.Area() - intersection_area;
+
+  const float intersection_threshold = union_area * min_iou;
+  if (intersection_area < intersection_threshold) {
+    VLOG(1) << absl::StrFormat("Lost tracking: IoU intersection %f < %f",
+                               intersection_area, intersection_threshold);
+    return false;
+  }
+
+  return true;
+}
+
+// Verifies that the current frame re-crop rect rotation/translation/scale
+// didn't change much compared to the previous frame rect. Translation and
+// scale are normalized by the current frame re-crop rect.
+bool RectRequirementsSatisfied(const NormalizedRect& prev_rect,
+                               const NormalizedRect& recrop_rect,
+                               const std::pair<int, int> image_size,
+                               const float rotation_degrees,
+                               const float translation, const float scale) {
+  // Rotate both rects so that re-crop rect edges are parallel to the XY axes.
+  // That makes it possible to compute the x/y translation of the previous
+  // frame rect along the axes of the current frame re-crop rect.
+  const float rotation = -recrop_rect.rotation();
+
+  const float cosa = cos(rotation);
+  const float sina = sin(rotation);
+
+  // Rotate previous frame rect and get its parameters.
+  const float prev_rect_x = prev_rect.x_center() * image_size.first * cosa -
+                            prev_rect.y_center() * image_size.second * sina;
+  const float prev_rect_y = prev_rect.x_center() * image_size.first * sina +
+                            prev_rect.y_center() * image_size.second * cosa;
+  const float prev_rect_width = prev_rect.width() * image_size.first;
+  const float prev_rect_height = prev_rect.height() * image_size.second;
+  const float prev_rect_rotation = prev_rect.rotation() / M_PI * 180.f;
+
+  // Rotate current frame re-crop rect and get its parameters.
+  const float recrop_rect_x = recrop_rect.x_center() * image_size.first * cosa -
+                              recrop_rect.y_center() * image_size.second * sina;
+  const float recrop_rect_y = recrop_rect.x_center() * image_size.first * sina +
+                              recrop_rect.y_center() * image_size.second * cosa;
+  const float recrop_rect_width = recrop_rect.width() * image_size.first;
+  const float recrop_rect_height = recrop_rect.height() * image_size.second;
+  const float recrop_rect_rotation = recrop_rect.rotation() / M_PI * 180.f;
+
+  // Rect requirements are satisfied unless one of the checks below fails.
+  bool satisfied = true;
+
+  // Ensure that rotation diff is in [0, 180] range.
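// Note on the wrap-around handling below: rect rotations are compared as
// angles, so a raw difference can exceed +/-180 degrees even when the rects
// are nearly aligned. For example, prev = 170 and recrop = -170 gives a raw
// diff of 340, which the two adjustments fold to -20, i.e. an effective
// 20-degree change.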
+  float rotation_diff = prev_rect_rotation - recrop_rect_rotation;
+  if (rotation_diff > 180.f) {
+    rotation_diff -= 360.f;
+  }
+  if (rotation_diff < -180.f) {
+    rotation_diff += 360.f;
+  }
+  rotation_diff = abs(rotation_diff);
+  if (rotation_diff > rotation_degrees) {
+    satisfied = false;
+    VLOG(1) << absl::StrFormat("Lost tracking: rect rotation %f > %f",
+                               rotation_diff, rotation_degrees);
+  }
+
+  const float x_diff = abs(prev_rect_x - recrop_rect_x);
+  const float x_threshold = recrop_rect_width * translation;
+  if (x_diff > x_threshold) {
+    satisfied = false;
+    VLOG(1) << absl::StrFormat("Lost tracking: rect x translation %f > %f",
+                               x_diff, x_threshold);
+  }
+
+  const float y_diff = abs(prev_rect_y - recrop_rect_y);
+  const float y_threshold = recrop_rect_height * translation;
+  if (y_diff > y_threshold) {
+    satisfied = false;
+    VLOG(1) << absl::StrFormat("Lost tracking: rect y translation %f > %f",
+                               y_diff, y_threshold);
+  }
+
+  const float width_diff = abs(prev_rect_width - recrop_rect_width);
+  const float width_threshold = recrop_rect_width * scale;
+  if (width_diff > width_threshold) {
+    satisfied = false;
+    VLOG(1) << absl::StrFormat("Lost tracking: rect width %f > %f", width_diff,
+                               width_threshold);
+  }
+
+  const float height_diff = abs(prev_rect_height - recrop_rect_height);
+  const float height_threshold = recrop_rect_height * scale;
+  if (height_diff > height_threshold) {
+    satisfied = false;
+    VLOG(1) << absl::StrFormat("Lost tracking: rect height %f > %f",
+                               height_diff, height_threshold);
+  }
+
+  return satisfied;
+}
+
+// Verifies that landmarks from the previous frame are within the re-crop
+// rectangle bounds on the current frame.
+bool LandmarksRequirementsSatisfied(const NormalizedLandmarkList& landmarks,
+                                    const NormalizedRect& recrop_rect,
+                                    const std::pair<int, int> image_size,
+                                    const float recrop_rect_margin) {
+  // Rotate both the re-crop rectangle and the landmarks so that the re-crop
+  // rectangle edges are parallel to the XY axes. That makes it easy to check
+  // whether landmarks are within the re-crop rect bounds along the re-crop
+  // rect axes.
+  //
+  // Rect rotation is specified clockwise. To apply cos/sin functions we
+  // transform it into counterclockwise.
+  const float rotation = -recrop_rect.rotation();
+
+  const float cosa = cos(rotation);
+  const float sina = sin(rotation);
+
+  // Rotate rect.
+  const float rect_x = recrop_rect.x_center() * image_size.first * cosa -
+                       recrop_rect.y_center() * image_size.second * sina;
+  const float rect_y = recrop_rect.x_center() * image_size.first * sina +
+                       recrop_rect.y_center() * image_size.second * cosa;
+  const float rect_width =
+      recrop_rect.width() * image_size.first * (1.f + recrop_rect_margin);
+  const float rect_height =
+      recrop_rect.height() * image_size.second * (1.f + recrop_rect_margin);
+
+  // Get rect bounds.
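// The containment test below works in the rotated frame computed above: both
// the rect center and every landmark are rotated by -recrop_rect.rotation(),
// so the (possibly rotated) re-crop rect becomes axis-aligned and a simple
// min/max bounds check suffices. A negative recrop_rect_margin (as used by
// the face and hand tracking graphs later in this patch) shrinks the rect,
// making the check stricter.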
+  const float rect_left = rect_x - rect_width * 0.5f;
+  const float rect_right = rect_x + rect_width * 0.5f;
+  const float rect_top = rect_y - rect_height * 0.5f;
+  const float rect_bottom = rect_y + rect_height * 0.5f;
+
+  for (int i = 0; i < landmarks.landmark_size(); ++i) {
+    const auto& landmark = landmarks.landmark(i);
+    const float x = landmark.x() * image_size.first * cosa -
+                    landmark.y() * image_size.second * sina;
+    const float y = landmark.x() * image_size.first * sina +
+                    landmark.y() * image_size.second * cosa;
+
+    if (!(rect_left < x && x < rect_right && rect_top < y &&
+          y < rect_bottom)) {
+      VLOG(1) << "Lost tracking: landmarks out of re-crop rect";
+      return false;
+    }
+  }
+
+  return true;
+}
+
+}  // namespace
+
+// A calculator to track an object rectangle between frames.
+//
+// The calculator checks that all requirements for tracking are satisfied and
+// uses the rectangle from the previous frame in that case; otherwise it uses
+// the current frame re-crop rectangle.
+//
+// There are several types of tracking requirements that can be configured via
+// options:
+//   IoU: Verifies that the IoU of the previous frame rectangle and the
+//     current frame re-crop rectangle is at least a given threshold.
+//   Rect parameters: Verifies that the rotation/translation/scale of the
+//     re-crop rectangle on the current frame is close to the rectangle from
+//     the previous frame within given thresholds.
+//   Landmarks: Verifies that landmarks from the previous frame are within
+//     the re-crop rectangle on the current frame.
+//
+// Inputs:
+//   PREV_LANDMARKS: Object landmarks from the previous frame.
+//   PREV_LANDMARKS_RECT: Object rectangle based on the landmarks from the
+//     previous frame.
+//   RECROP_RECT: Object re-crop rectangle from the current frame.
+//   IMAGE_SIZE: Image size to transform normalized coordinates to absolute.
+//
+// Outputs:
+//   TRACKING_RECT: Rectangle to use for object prediction on the current
+//     frame. It will be either the object rectangle from the previous frame
+//     (if all tracking requirements are satisfied) or the re-crop rectangle
+//     from the current frame (if tracking lost the object).
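// The three requirement checks are ANDed together: keep_tracking starts out
// true and any configured requirement that fails switches the output to the
// current frame re-crop rect. Leaving a requirement unset in the options
// disables that check entirely (see the has_*_requirements() calls below).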
+//
+// Example config:
+// node {
+//   calculator: "RoiTrackingCalculator"
+//   input_stream: "PREV_LANDMARKS:prev_hand_landmarks"
+//   input_stream: "PREV_LANDMARKS_RECT:prev_hand_landmarks_rect"
+//   input_stream: "RECROP_RECT:hand_recrop_rect"
+//   input_stream: "IMAGE_SIZE:image_size"
+//   output_stream: "TRACKING_RECT:hand_tracking_rect"
+//   options: {
+//     [mediapipe.RoiTrackingCalculatorOptions.ext] {
+//       rect_requirements: {
+//         rotation_degrees: 40.0
+//         translation: 0.2
+//         scale: 0.4
+//       }
+//       landmarks_requirements: {
+//         recrop_rect_margin: -0.1
+//       }
+//     }
+//   }
+// }
+class RoiTrackingCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  RoiTrackingCalculatorOptions options_;
+};
+REGISTER_CALCULATOR(RoiTrackingCalculator);
+
+absl::Status RoiTrackingCalculator::GetContract(CalculatorContract* cc) {
+  cc->Inputs().Tag(kPrevLandmarksTag).Set<NormalizedLandmarkList>();
+  cc->Inputs().Tag(kPrevLandmarksRectTag).Set<NormalizedRect>();
+  cc->Inputs().Tag(kRecropRectTag).Set<NormalizedRect>();
+  cc->Inputs().Tag(kImageSizeTag).Set<std::pair<int, int>>();
+  cc->Outputs().Tag(kTrackingRectTag).Set<NormalizedRect>();
+
+  return absl::OkStatus();
+}
+
+absl::Status RoiTrackingCalculator::Open(CalculatorContext* cc) {
+  cc->SetOffset(TimestampDiff(0));
+  options_ = cc->Options<RoiTrackingCalculatorOptions>();
+  return absl::OkStatus();
+}
+
+absl::Status RoiTrackingCalculator::Process(CalculatorContext* cc) {
+  // If there is no current frame re-crop rect (i.e. the object is not present
+  // on the current frame) - return an empty packet.
+  if (cc->Inputs().Tag(kRecropRectTag).IsEmpty()) {
+    return absl::OkStatus();
+  }
+
+  // If there is no previous rect, but there is a current re-crop rect -
+  // return the current re-crop rect as is.
+  if (cc->Inputs().Tag(kPrevLandmarksRectTag).IsEmpty()) {
+    cc->Outputs()
+        .Tag(kTrackingRectTag)
+        .AddPacket(cc->Inputs().Tag(kRecropRectTag).Value());
+    return absl::OkStatus();
+  }
+
+  // At this point we have both the previous rect (which also means we have
+  // previous landmarks) and the current re-crop rect.
+  const auto& prev_landmarks =
+      cc->Inputs().Tag(kPrevLandmarksTag).Get<NormalizedLandmarkList>();
+  const auto& prev_rect =
+      cc->Inputs().Tag(kPrevLandmarksRectTag).Get<NormalizedRect>();
+  const auto& recrop_rect =
+      cc->Inputs().Tag(kRecropRectTag).Get<NormalizedRect>();
+  const auto& image_size =
+      cc->Inputs().Tag(kImageSizeTag).Get<std::pair<int, int>>();
+
+  // Keep tracking unless one of the requirements below is not satisfied.
+  bool keep_tracking = true;
+
+  // If IoU of the previous rect and the current re-crop rect is lower than
+  // the allowed threshold - use the current re-crop rect.
+  if (options_.has_iou_requirements() &&
+      !IouRequirementsSatisfied(prev_rect, recrop_rect, image_size,
+                                options_.iou_requirements().min_iou())) {
+    keep_tracking = false;
+  }
+
+  // If the previous rect and the current re-crop rect differ more than is
+  // allowed by the augmentations (used during model training) - use the
+  // current re-crop rect.
+  if (options_.has_rect_requirements() &&
+      !RectRequirementsSatisfied(
+          prev_rect, recrop_rect, image_size,
+          options_.rect_requirements().rotation_degrees(),
+          options_.rect_requirements().translation(),
+          options_.rect_requirements().scale())) {
+    keep_tracking = false;
+  }
+
+  // If landmarks from the previous frame are not in the current re-crop rect
+  // (i.e. the object moved too fast and using the previous frame rect won't
+  // cover landmarks on the current frame) - use the current re-crop rect.
+  if (options_.has_landmarks_requirements() &&
+      !LandmarksRequirementsSatisfied(
+          prev_landmarks, recrop_rect, image_size,
+          options_.landmarks_requirements().recrop_rect_margin())) {
+    keep_tracking = false;
+  }
+
+  // If the object didn't move a lot compared to the previous frame - keep
+  // tracking it and return the rect from the previous frame; otherwise -
+  // return the re-crop rect from the current frame.
+  if (keep_tracking) {
+    cc->Outputs()
+        .Tag(kTrackingRectTag)
+        .AddPacket(cc->Inputs().Tag(kPrevLandmarksRectTag).Value());
+  } else {
+    cc->Outputs()
+        .Tag(kTrackingRectTag)
+        .AddPacket(cc->Inputs().Tag(kRecropRectTag).Value());
+    VLOG(1) << "Lost tracking: check messages above for details";
+  }
+
+  return absl::OkStatus();
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/modules/holistic_landmark/calculators/roi_tracking_calculator.proto b/mediapipe/modules/holistic_landmark/calculators/roi_tracking_calculator.proto
new file mode 100644
index 0000000..ec3cf22
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/calculators/roi_tracking_calculator.proto
@@ -0,0 +1,59 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+
+message RoiTrackingCalculatorOptions {
+  extend CalculatorOptions {
+    optional RoiTrackingCalculatorOptions ext = 329994630;
+  }
+
+  // Verifies that the Intersection over Union of the previous frame rect and
+  // the current frame re-crop rect is at least the specified threshold.
+  message IouRequirements {
+    optional float min_iou = 1 [default = 0.5];
+  }
+
+  // Verifies that the current frame re-crop rect rotation/translation/scale
+  // didn't change much compared to the previous frame rect.
+  message RectRequirements {
+    // Allowed rotation change defined in degrees.
+    optional float rotation_degrees = 1 [default = 10.0];
+
+    // Allowed translation change defined as absolute translation normalized
+    // by re-crop rectangle size.
+    optional float translation = 2 [default = 0.1];
+
+    // Allowed scale change defined as absolute size change normalized by
+    // re-crop rectangle size.
+    optional float scale = 3 [default = 0.1];
+  }
+
+  // Verifies that landmarks from the previous frame are within the re-crop
+  // rectangle bounds on the current frame.
+  message LandmarksRequirements {
+    // Margin to apply to the re-crop rectangle before verifying landmarks.
+ optional float recrop_rect_margin = 1 [default = 0.0]; + } + + optional IouRequirements iou_requirements = 1; + + optional RectRequirements rect_requirements = 2; + + optional LandmarksRequirements landmarks_requirements = 3; +} diff --git a/mediapipe/modules/holistic_landmark/face_detection_front_detections_to_roi.pbtxt b/mediapipe/modules/holistic_landmark/face_detection_front_detections_to_roi.pbtxt new file mode 100644 index 0000000..7d9fa9e --- /dev/null +++ b/mediapipe/modules/holistic_landmark/face_detection_front_detections_to_roi.pbtxt @@ -0,0 +1,48 @@ +# Calculates ROI from detections provided by `face_detection_short_range.tflite` +# model. +type: "FaceDetectionFrontDetectionsToRoi" + +# Detected faces. (std::vector) +input_stream: "DETECTIONS:detections" +# Image size (width & height). (std::pair) +input_stream: "IMAGE_SIZE:image_size" + +# Refined (more accurate) ROI to use for face landmarks prediction. +# (NormalizedRect) +output_stream: "ROI:roi" + +# Converts the face detection into a rectangle (normalized by image size) +# that encloses the face and is rotated such that the line connecting right side +# of the right eye and left side of the left eye is aligned with the X-axis of +# the rectangle. +node { + calculator: "DetectionsToRectsCalculator" + input_stream: "DETECTIONS:detections" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "NORM_RECT:raw_roi" + options: { + [mediapipe.DetectionsToRectsCalculatorOptions.ext] { + rotation_vector_start_keypoint_index: 0 # Right eye. + rotation_vector_end_keypoint_index: 1 # Left eye. + rotation_vector_target_angle_degrees: 0 + conversion_mode: USE_KEYPOINTS + } + } +} + +# Expands and shifts the rectangle that contains the face so that it's likely +# to cover the entire face. +node { + calculator: "RectTransformationCalculator" + input_stream: "NORM_RECT:raw_roi" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "roi" + options: { + [mediapipe.RectTransformationCalculatorOptions.ext] { + scale_x: 2.0 + scale_y: 2.0 + shift_y: -0.1 + square_long: true + } + } +} diff --git a/mediapipe/modules/holistic_landmark/face_landmarks_from_pose_cpu.pbtxt b/mediapipe/modules/holistic_landmark/face_landmarks_from_pose_cpu.pbtxt new file mode 100644 index 0000000..1d99672 --- /dev/null +++ b/mediapipe/modules/holistic_landmark/face_landmarks_from_pose_cpu.pbtxt @@ -0,0 +1,82 @@ +# Predicts face landmarks within an ROI derived from face-related pose +# landmarks. + +type: "FaceLandmarksFromPoseCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:input_video" +# Face-related pose landmarks. (NormalizedLandmarkList) +input_stream: "FACE_LANDMARKS_FROM_POSE:face_landmarks_from_pose" + +# Whether to run the face landmark model with attention on lips and eyes to +# provide more accuracy, and additionally output iris landmarks. If unspecified, +# functions as set to false. (bool) +input_side_packet: "REFINE_LANDMARKS:refine_landmarks" + +# Face landmarks. (NormalizedLandmarkList) +output_stream: "FACE_LANDMARKS:face_landmarks" + +# Debug outputs. +# Face ROI derived from face-related pose landmarks, which defines the search +# region for the face detection model. (NormalizedRect) +output_stream: "FACE_ROI_FROM_POSE:face_roi_from_pose" +# Refined face crop rectangle predicted by face detection model. +# (NormalizedRect) +output_stream: "FACE_ROI_FROM_DETECTION:face_roi_from_detection" +# Rectangle used to predict face landmarks. 
(NormalizedRect) +output_stream: "FACE_TRACKING_ROI:face_tracking_roi" + +# TODO: do not predict face when most of the face landmarks from +# pose are invisible. + +# Extracts image size from the input images. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:input_video" + output_stream: "SIZE:image_size" +} + +# Gets ROI for re-crop model from face-related pose landmarks. +node { + calculator: "FaceLandmarksFromPoseToRecropRoi" + input_stream: "FACE_LANDMARKS_FROM_POSE:face_landmarks_from_pose" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:face_roi_from_pose" +} + +# Detects faces within the face ROI calculated from pose landmarks. This is done +# to refine face ROI for further landmark detection as ROI calculated from +# pose landmarks may be inaccurate. +node { + calculator: "FaceDetectionShortRangeByRoiCpu" + input_stream: "IMAGE:input_video" + input_stream: "ROI:face_roi_from_pose" + output_stream: "DETECTIONS:face_detections" +} + +# Calculates refined face ROI. +node { + calculator: "FaceDetectionFrontDetectionsToRoi" + input_stream: "DETECTIONS:face_detections" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:face_roi_from_detection" +} + +# Gets face tracking rectangle (either face rectangle from the previous +# frame or face re-crop rectangle from the current frame) for face prediction. +node { + calculator: "FaceTracking" + input_stream: "LANDMARKS:face_landmarks" + input_stream: "FACE_RECROP_ROI:face_roi_from_detection" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "FACE_TRACKING_ROI:face_tracking_roi" +} + +# Predicts face landmarks from the tracking rectangle. +node { + calculator: "FaceLandmarkCpu" + input_stream: "IMAGE:input_video" + input_stream: "ROI:face_tracking_roi" + input_side_packet: "WITH_ATTENTION:refine_landmarks" + output_stream: "LANDMARKS:face_landmarks" +} diff --git a/mediapipe/modules/holistic_landmark/face_landmarks_from_pose_gpu.pbtxt b/mediapipe/modules/holistic_landmark/face_landmarks_from_pose_gpu.pbtxt new file mode 100644 index 0000000..24a9854 --- /dev/null +++ b/mediapipe/modules/holistic_landmark/face_landmarks_from_pose_gpu.pbtxt @@ -0,0 +1,82 @@ +# Predicts face landmarks within an ROI derived from face-related pose +# landmarks. + +type: "FaceLandmarksFromPoseGpu" + +# GPU image. (ImageFrame) +input_stream: "IMAGE:input_video" +# Face-related pose landmarks. (NormalizedLandmarkList) +input_stream: "FACE_LANDMARKS_FROM_POSE:face_landmarks_from_pose" + +# Whether to run the face landmark model with attention on lips and eyes to +# provide more accuracy, and additionally output iris landmarks. If unspecified, +# functions as set to false. (bool) +input_side_packet: "REFINE_LANDMARKS:refine_landmarks" + +# Face landmarks. (NormalizedLandmarkList) +output_stream: "FACE_LANDMARKS:face_landmarks" + +# Debug outputs. +# Face ROI derived from face-related pose landmarks, which defines the search +# region for the face detection model. (NormalizedRect) +output_stream: "FACE_ROI_FROM_POSE:face_roi_from_pose" +# Refined face crop rectangle predicted by face detection model. +# (NormalizedRect) +output_stream: "FACE_ROI_FROM_DETECTION:face_roi_from_detection" +# Rectangle used to predict face landmarks. (NormalizedRect) +output_stream: "FACE_TRACKING_ROI:face_tracking_roi" + +# TODO: do not predict face when most of the face landmarks from +# pose are invisible. + +# Extracts image size from the input images. 
+node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:input_video" + output_stream: "SIZE:image_size" +} + +# Gets ROI for re-crop model from face-related pose landmarks. +node { + calculator: "FaceLandmarksFromPoseToRecropRoi" + input_stream: "FACE_LANDMARKS_FROM_POSE:face_landmarks_from_pose" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:face_roi_from_pose" +} + +# Detects faces within the face ROI calculated from pose landmarks. This is done +# to refine face ROI for further landmark detection as ROI calculated from +# pose landmarks may be inaccurate. +node { + calculator: "FaceDetectionShortRangeByRoiGpu" + input_stream: "IMAGE:input_video" + input_stream: "ROI:face_roi_from_pose" + output_stream: "DETECTIONS:face_detections" +} + +# Calculates refined face ROI. +node { + calculator: "FaceDetectionFrontDetectionsToRoi" + input_stream: "DETECTIONS:face_detections" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:face_roi_from_detection" +} + +# Gets face tracking rectangle (either face rectangle from the previous +# frame or face re-crop rectangle from the current frame) for face prediction. +node { + calculator: "FaceTracking" + input_stream: "LANDMARKS:face_landmarks" + input_stream: "FACE_RECROP_ROI:face_roi_from_detection" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "FACE_TRACKING_ROI:face_tracking_roi" +} + +# Predicts face landmarks from the tracking rectangle. +node { + calculator: "FaceLandmarkGpu" + input_stream: "IMAGE:input_video" + input_stream: "ROI:face_tracking_roi" + input_side_packet: "WITH_ATTENTION:refine_landmarks" + output_stream: "LANDMARKS:face_landmarks" +} diff --git a/mediapipe/modules/holistic_landmark/face_landmarks_from_pose_to_recrop_roi.pbtxt b/mediapipe/modules/holistic_landmark/face_landmarks_from_pose_to_recrop_roi.pbtxt new file mode 100644 index 0000000..65bd340 --- /dev/null +++ b/mediapipe/modules/holistic_landmark/face_landmarks_from_pose_to_recrop_roi.pbtxt @@ -0,0 +1,51 @@ +# Converts face-related pose landmarks to re-crop ROI. + +type: "FaceLandmarksFromPoseToRecropRoi" + +# Face-related pose landmarks (There should be 11 of them). +# (NormalizedLandmarkList) +input_stream: "FACE_LANDMARKS_FROM_POSE:face_landmarks_from_pose" +# Image size (width & height). (std::pair) +input_stream: "IMAGE_SIZE:image_size" + +# ROI to be used for face detection. (NormalizedRect) +output_stream: "ROI:roi" + +# Converts face-related pose landmarks to a detection that tightly encloses all +# landmarks. +node { + calculator: "LandmarksToDetectionCalculator" + input_stream: "NORM_LANDMARKS:face_landmarks_from_pose" + output_stream: "DETECTION:pose_face_detection" +} + +# Converts face detection to a normalized face rectangle. +node { + calculator: "DetectionsToRectsCalculator" + input_stream: "DETECTION:pose_face_detection" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "NORM_RECT:pose_face_rect" + options: { + [mediapipe.DetectionsToRectsCalculatorOptions.ext] { + rotation_vector_start_keypoint_index: 5 # Right eye. + rotation_vector_end_keypoint_index: 2 # Left eye. + rotation_vector_target_angle_degrees: 0 + } + } +} + +# Expands face rectangle so that it becomes big enough for face detector to +# localize it accurately. 
+node {
+  calculator: "RectTransformationCalculator"
+  input_stream: "NORM_RECT:pose_face_rect"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "roi"
+  options: {
+    [mediapipe.RectTransformationCalculatorOptions.ext] {
+      scale_x: 3.0
+      scale_y: 3.0
+      square_long: true
+    }
+  }
+}
diff --git a/mediapipe/modules/holistic_landmark/face_landmarks_to_roi.pbtxt b/mediapipe/modules/holistic_landmark/face_landmarks_to_roi.pbtxt
new file mode 100644
index 0000000..8913cc1
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/face_landmarks_to_roi.pbtxt
@@ -0,0 +1,53 @@
+# Converts face landmarks to ROI.
+
+type: "FaceLandmarksToRoi"
+
+# Face landmarks. (NormalizedLandmarkList)
+input_stream: "LANDMARKS:face_landmarks"
+# Image size (width & height). (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# ROI according to landmarks. (NormalizedRect)
+output_stream: "ROI:roi"
+
+# Converts face landmarks to a detection that tightly encloses all landmarks.
+node {
+  calculator: "LandmarksToDetectionCalculator"
+  input_stream: "NORM_LANDMARKS:face_landmarks"
+  output_stream: "DETECTION:face_detection"
+}
+
+# Converts the face detection into a rectangle (normalized by image size)
+# that encloses the face and is rotated such that the line connecting the two
+# eye keypoints below is aligned with the X-axis of the rectangle.
+node {
+  calculator: "DetectionsToRectsCalculator"
+  input_stream: "DETECTION:face_detection"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "NORM_RECT:face_landmarks_rect_tight"
+  options: {
+    [mediapipe.DetectionsToRectsCalculatorOptions.ext] {
+      rotation_vector_start_keypoint_index: 33  # Right side of left eye.
+      rotation_vector_end_keypoint_index: 263  # Left side of right eye.
+      rotation_vector_target_angle_degrees: 0
+    }
+  }
+}
+
+# Expands the face rectangle so that it's likely to contain the face even with
+# some motion.
+node {
+  calculator: "RectTransformationCalculator"
+  input_stream: "NORM_RECT:face_landmarks_rect_tight"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "roi"
+  options: {
+    [mediapipe.RectTransformationCalculatorOptions.ext] {
+      scale_x: 1.5
+      scale_y: 1.5
+      # TODO: remove `square_long` where appropriate.
+      square_long: true
+    }
+  }
+}
diff --git a/mediapipe/modules/holistic_landmark/face_tracking.pbtxt b/mediapipe/modules/holistic_landmark/face_tracking.pbtxt
new file mode 100644
index 0000000..53022d3
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/face_tracking.pbtxt
@@ -0,0 +1,61 @@
+# Decides what ROI to use for face landmarks prediction: either previous frame
+# landmarks ROI or the current frame face re-crop ROI.
+
+type: "FaceTracking"
+
+# Face landmarks from the current frame. They will be memorized for tracking on
+# the next frame. (NormalizedLandmarkList)
+input_stream: "LANDMARKS:face_landmarks"
+# Face re-crop ROI from the current frame. (NormalizedRect)
+input_stream: "FACE_RECROP_ROI:face_recrop_roi"
+# Image size (width & height). (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# Face tracking ROI, which is either the face landmarks ROI from the previous
+# frame if the face is still tracked, or the face re-crop ROI from the current
+# frame otherwise. (NormalizedRect)
+output_stream: "FACE_TRACKING_ROI:face_tracking_roi"
+
+# Keeps track of face landmarks from the previous frame.
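# (The PreviousLoopbackCalculator below closes a cycle in the graph:
# face_landmarks is produced downstream, so it is declared as a back edge and
# re-emitted one frame later as prev_face_landmarks. On the very first frame
# only a timestamp bound update is sent, which keeps downstream calculators
# from blocking while there is nothing to track yet.)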
+node {
+  calculator: "PreviousLoopbackCalculator"
+  input_stream: "MAIN:image_size"
+  input_stream: "LOOP:face_landmarks"
+  input_stream_info: {
+    tag_index: "LOOP"
+    back_edge: true
+  }
+  output_stream: "PREV_LOOP:prev_face_landmarks"
+}
+
+# Gets the face landmarks rect.
+node {
+  calculator: "FaceLandmarksToRoi"
+  input_stream: "LANDMARKS:prev_face_landmarks"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "ROI:prev_face_landmarks_rect"
+}
+
+# Checks that all requirements for tracking are satisfied and, if so, uses the
+# face rectangle from the previous frame. Otherwise uses the face re-crop
+# rectangle from the current frame.
+node {
+  calculator: "RoiTrackingCalculator"
+  input_stream: "PREV_LANDMARKS:prev_face_landmarks"
+  input_stream: "PREV_LANDMARKS_RECT:prev_face_landmarks_rect"
+  input_stream: "RECROP_RECT:face_recrop_roi"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "TRACKING_RECT:face_tracking_roi"
+  options: {
+    [mediapipe.RoiTrackingCalculatorOptions.ext] {
+      rect_requirements: {
+        rotation_degrees: 15.0
+        translation: 0.1
+        scale: 0.3
+      }
+      landmarks_requirements: {
+        recrop_rect_margin: -0.2
+      }
+    }
+  }
+}
diff --git a/mediapipe/modules/holistic_landmark/hand_landmarks_from_pose_cpu.pbtxt b/mediapipe/modules/holistic_landmark/hand_landmarks_from_pose_cpu.pbtxt
new file mode 100644
index 0000000..0a44bcb
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/hand_landmarks_from_pose_cpu.pbtxt
@@ -0,0 +1,78 @@
+# Predicts hand landmarks within a ROI derived from hand-related pose landmarks.
+
+type: "HandLandmarksFromPoseCpu"
+
+# CPU image. (ImageFrame)
+input_stream: "IMAGE:input_video"
+# Hand-related pose landmarks in [wrist, pinky, index] order.
+# (NormalizedLandmarkList)
+input_stream: "HAND_LANDMARKS_FROM_POSE:hand_landmarks_from_pose"
+
+# Hand landmarks. (NormalizedLandmarkList)
+output_stream: "HAND_LANDMARKS:hand_landmarks"
+
+# Debug outputs.
+# Hand ROI derived from hand-related landmarks, which defines the search region
+# for the hand re-crop model. (NormalizedRect)
+output_stream: "HAND_ROI_FROM_POSE:hand_roi_from_pose"
+# Refined hand crop rectangle predicted by hand re-crop model. (NormalizedRect)
+output_stream: "HAND_ROI_FROM_RECROP:hand_roi_from_recrop"
+# Rectangle used to predict hand landmarks. (NormalizedRect)
+output_stream: "HAND_TRACKING_ROI:hand_tracking_roi"
+
+# Gets hand visibility.
+node {
+  calculator: "HandVisibilityFromHandLandmarksFromPose"
+  input_stream: "HAND_LANDMARKS_FROM_POSE:hand_landmarks_from_pose"
+  output_stream: "VISIBILITY:hand_visibility"
+}
+
+# Drops hand-related pose landmarks if the pose wrist is not visible. This
+# prevents predicting hand landmarks on the current frame.
+node {
+  calculator: "GateCalculator"
+  input_stream: "hand_landmarks_from_pose"
+  input_stream: "ALLOW:hand_visibility"
+  output_stream: "ensured_hand_landmarks_from_pose"
+}
+
+# Extracts image size from the input images.
+node {
+  calculator: "ImagePropertiesCalculator"
+  input_stream: "IMAGE:input_video"
+  output_stream: "SIZE:image_size"
+}
+
+# Gets ROI for re-crop model from hand-related pose landmarks.
+node {
+  calculator: "HandLandmarksFromPoseToRecropRoi"
+  input_stream: "HAND_LANDMARKS_FROM_POSE:ensured_hand_landmarks_from_pose"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "ROI:hand_roi_from_pose"
+}
+
+# Predicts hand re-crop rectangle on the current frame.
+node {
+  calculator: "HandRecropByRoiCpu"
+  input_stream: "IMAGE:input_video"
+  input_stream: "ROI:hand_roi_from_pose"
+  output_stream: "HAND_ROI_FROM_RECROP:hand_roi_from_recrop"
+}
+
+# Gets hand tracking rectangle (either hand rectangle from the previous
+# frame or hand re-crop rectangle from the current frame) for hand prediction.
+node {
+  calculator: "HandTracking"
+  input_stream: "LANDMARKS:hand_landmarks"
+  input_stream: "HAND_ROI_FROM_RECROP:hand_roi_from_recrop"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "HAND_TRACKING_ROI:hand_tracking_roi"
+}
+
+# Predicts hand landmarks from the tracking rectangle.
+node {
+  calculator: "HandLandmarkCpu"
+  input_stream: "IMAGE:input_video"
+  input_stream: "ROI:hand_tracking_roi"
+  output_stream: "LANDMARKS:hand_landmarks"
+}
diff --git a/mediapipe/modules/holistic_landmark/hand_landmarks_from_pose_gpu.pbtxt b/mediapipe/modules/holistic_landmark/hand_landmarks_from_pose_gpu.pbtxt
new file mode 100644
index 0000000..0296e7d
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/hand_landmarks_from_pose_gpu.pbtxt
@@ -0,0 +1,78 @@
+# Predicts hand landmarks within a ROI derived from hand-related pose landmarks.
+
+type: "HandLandmarksFromPoseGpu"
+
+# GPU image. (ImageFrame)
+input_stream: "IMAGE:input_video"
+# Hand-related pose landmarks in [wrist, pinky, index] order.
+# (NormalizedLandmarkList)
+input_stream: "HAND_LANDMARKS_FROM_POSE:hand_landmarks_from_pose"
+
+# Hand landmarks. (NormalizedLandmarkList)
+output_stream: "HAND_LANDMARKS:hand_landmarks"
+
+# Debug outputs.
+# Hand ROI derived from hand-related landmarks, which defines the search region
+# for the hand re-crop model. (NormalizedRect)
+output_stream: "HAND_ROI_FROM_POSE:hand_roi_from_pose"
+# Refined hand crop rectangle predicted by hand re-crop model. (NormalizedRect)
+output_stream: "HAND_ROI_FROM_RECROP:hand_roi_from_recrop"
+# Rectangle used to predict hand landmarks. (NormalizedRect)
+output_stream: "HAND_TRACKING_ROI:hand_tracking_roi"
+
+# Gets hand visibility.
+node {
+  calculator: "HandVisibilityFromHandLandmarksFromPose"
+  input_stream: "HAND_LANDMARKS_FROM_POSE:hand_landmarks_from_pose"
+  output_stream: "VISIBILITY:hand_visibility"
+}
+
+# Drops hand-related pose landmarks if the pose wrist is not visible. This
+# prevents predicting hand landmarks on the current frame.
+node {
+  calculator: "GateCalculator"
+  input_stream: "hand_landmarks_from_pose"
+  input_stream: "ALLOW:hand_visibility"
+  output_stream: "ensured_hand_landmarks_from_pose"
+}
+
+# Extracts image size from the input images.
+node {
+  calculator: "ImagePropertiesCalculator"
+  input_stream: "IMAGE_GPU:input_video"
+  output_stream: "SIZE:image_size"
+}
+
+# Gets ROI for re-crop model from hand-related pose landmarks.
+node {
+  calculator: "HandLandmarksFromPoseToRecropRoi"
+  input_stream: "HAND_LANDMARKS_FROM_POSE:ensured_hand_landmarks_from_pose"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "ROI:hand_roi_from_pose"
+}
+
+# Predicts hand re-crop rectangle on the current frame.
+node {
+  calculator: "HandRecropByRoiGpu"
+  input_stream: "IMAGE:input_video"
+  input_stream: "ROI:hand_roi_from_pose"
+  output_stream: "HAND_ROI_FROM_RECROP:hand_roi_from_recrop"
+}
+
+# Gets hand tracking rectangle (either hand rectangle from the previous
+# frame or hand re-crop rectangle from the current frame) for hand prediction.
+node {
+  calculator: "HandTracking"
+  input_stream: "LANDMARKS:hand_landmarks"
+  input_stream: "HAND_ROI_FROM_RECROP:hand_roi_from_recrop"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "HAND_TRACKING_ROI:hand_tracking_roi"
+}
+
+# Predicts hand landmarks from the tracking rectangle.
+node {
+  calculator: "HandLandmarkGpu"
+  input_stream: "IMAGE:input_video"
+  input_stream: "ROI:hand_tracking_roi"
+  output_stream: "LANDMARKS:hand_landmarks"
+}
diff --git a/mediapipe/modules/holistic_landmark/hand_landmarks_from_pose_to_recrop_roi.pbtxt b/mediapipe/modules/holistic_landmark/hand_landmarks_from_pose_to_recrop_roi.pbtxt
new file mode 100644
index 0000000..1c2cfe5
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/hand_landmarks_from_pose_to_recrop_roi.pbtxt
@@ -0,0 +1,45 @@
+# Converts hand-related pose landmarks to hand re-crop ROI.
+
+type: "HandLandmarksFromPoseToRecropRoi"
+
+# Hand-related pose landmarks in [wrist, pinky, index] order.
+# (NormalizedLandmarkList)
+input_stream: "HAND_LANDMARKS_FROM_POSE:hand_landmarks_from_pose"
+# Image size (width & height). (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# ROI to be used for re-crop prediction. (NormalizedRect)
+output_stream: "ROI:roi"
+
+# Converts hand-related pose landmarks to a detection that tightly encloses all
+# of them.
+node {
+  calculator: "LandmarksToDetectionCalculator"
+  input_stream: "NORM_LANDMARKS:hand_landmarks_from_pose"
+  output_stream: "DETECTION:hand_detection_from_pose"
+}
+
+# Converts hand detection to a normalized hand rectangle.
+node {
+  calculator: "HandDetectionsFromPoseToRectsCalculator"
+  input_stream: "DETECTION:hand_detection_from_pose"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "NORM_RECT:hand_roi_from_pose"
+}
+
+# Expands the palm rectangle so that it becomes big enough for the hand re-crop
+# model to localize it accurately.
+node {
+  calculator: "RectTransformationCalculator"
+  input_stream: "NORM_RECT:hand_roi_from_pose"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "roi"
+  options: {
+    [mediapipe.RectTransformationCalculatorOptions.ext] {
+      scale_x: 2.7
+      scale_y: 2.7
+      shift_y: -0.1
+      square_long: true
+    }
+  }
+}
diff --git a/mediapipe/modules/holistic_landmark/hand_landmarks_left_and_right_cpu.pbtxt b/mediapipe/modules/holistic_landmark/hand_landmarks_left_and_right_cpu.pbtxt
new file mode 100644
index 0000000..75e0133
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/hand_landmarks_left_and_right_cpu.pbtxt
@@ -0,0 +1,76 @@
+# Predicts left and right hand landmarks within corresponding ROIs derived from
+# hand-related pose landmarks.
+
+type: "HandLandmarksLeftAndRightCpu"
+
+# CPU image. (ImageFrame)
+input_stream: "IMAGE:input_video"
+# Pose landmarks to derive initial hand location from. (NormalizedLandmarkList)
+input_stream: "POSE_LANDMARKS:pose_landmarks"
+
+# Left hand landmarks. (NormalizedLandmarkList)
+output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks"
+# Right hand landmarks. (NormalizedLandmarkList)
+output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks"
+
+# Debug outputs.
+output_stream: "LEFT_HAND_ROI_FROM_POSE:left_hand_roi_from_pose" +output_stream: "LEFT_HAND_ROI_FROM_RECROP:left_hand_roi_from_recrop" +output_stream: "LEFT_HAND_TRACKING_ROI:left_hand_tracking_roi" +output_stream: "RIGHT_HAND_ROI_FROM_POSE:right_hand_roi_from_pose" +output_stream: "RIGHT_HAND_ROI_FROM_RECROP:right_hand_roi_from_recrop" +output_stream: "RIGHT_HAND_TRACKING_ROI:right_hand_tracking_roi" + +# Extracts left-hand-related landmarks from the pose landmarks. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "left_hand_landmarks_from_pose" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 15 end: 16 } + ranges: { begin: 17 end: 18 } + ranges: { begin: 19 end: 20 } + combine_outputs: true + } + } +} + +# Predicts left hand landmarks. +node { + calculator: "HandLandmarksFromPoseCpu" + input_stream: "IMAGE:input_video" + input_stream: "HAND_LANDMARKS_FROM_POSE:left_hand_landmarks_from_pose" + output_stream: "HAND_LANDMARKS:left_hand_landmarks" + # Debug outputs. + output_stream: "HAND_ROI_FROM_POSE:left_hand_roi_from_pose" + output_stream: "HAND_ROI_FROM_RECROP:left_hand_roi_from_recrop" + output_stream: "HAND_TRACKING_ROI:left_hand_tracking_roi" +} + +# Extracts right-hand-related landmarks from the pose landmarks. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "right_hand_landmarks_from_pose" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 16 end: 17 } + ranges: { begin: 18 end: 19 } + ranges: { begin: 20 end: 21 } + combine_outputs: true + } + } +} + +# Extracts right-hand-related landmarks from the pose landmarks. +node { + calculator: "HandLandmarksFromPoseCpu" + input_stream: "IMAGE:input_video" + input_stream: "HAND_LANDMARKS_FROM_POSE:right_hand_landmarks_from_pose" + output_stream: "HAND_LANDMARKS:right_hand_landmarks" + # Debug outputs. + output_stream: "HAND_ROI_FROM_POSE:right_hand_roi_from_pose" + output_stream: "HAND_ROI_FROM_RECROP:right_hand_roi_from_recrop" + output_stream: "HAND_TRACKING_ROI:right_hand_tracking_roi" +} diff --git a/mediapipe/modules/holistic_landmark/hand_landmarks_left_and_right_gpu.pbtxt b/mediapipe/modules/holistic_landmark/hand_landmarks_left_and_right_gpu.pbtxt new file mode 100644 index 0000000..adeec2b --- /dev/null +++ b/mediapipe/modules/holistic_landmark/hand_landmarks_left_and_right_gpu.pbtxt @@ -0,0 +1,76 @@ +# Predicts left and right hand landmarks within corresponding ROIs derived from +# hand-related pose landmarks. + +type: "HandLandmarksLeftAndRightGpu" + +# GPU image. (ImageFrame) +input_stream: "IMAGE:input_video" +# Pose landmarks to derive initial hand location from. (NormalizedLandmarkList) +input_stream: "POSE_LANDMARKS:pose_landmarks" + +# Left hand landmarks. (NormalizedLandmarkList) +output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" +# RIght hand landmarks. (NormalizedLandmarkList) +output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" + +# Debug outputs. +output_stream: "LEFT_HAND_ROI_FROM_POSE:left_hand_roi_from_pose" +output_stream: "LEFT_HAND_ROI_FROM_RECROP:left_hand_roi_from_recrop" +output_stream: "LEFT_HAND_TRACKING_ROI:left_hand_tracking_roi" +output_stream: "RIGHT_HAND_ROI_FROM_POSE:right_hand_roi_from_pose" +output_stream: "RIGHT_HAND_ROI_FROM_RECROP:right_hand_roi_from_recrop" +output_stream: "RIGHT_HAND_TRACKING_ROI:right_hand_tracking_roi" + +# Extracts left-hand-related landmarks from the pose landmarks. 
+node {
+  calculator: "SplitNormalizedLandmarkListCalculator"
+  input_stream: "pose_landmarks"
+  output_stream: "left_hand_landmarks_from_pose"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 15 end: 16 }
+      ranges: { begin: 17 end: 18 }
+      ranges: { begin: 19 end: 20 }
+      combine_outputs: true
+    }
+  }
+}
+
+# Predicts left hand landmarks.
+node {
+  calculator: "HandLandmarksFromPoseGpu"
+  input_stream: "IMAGE:input_video"
+  input_stream: "HAND_LANDMARKS_FROM_POSE:left_hand_landmarks_from_pose"
+  output_stream: "HAND_LANDMARKS:left_hand_landmarks"
+  # Debug outputs.
+  output_stream: "HAND_ROI_FROM_POSE:left_hand_roi_from_pose"
+  output_stream: "HAND_ROI_FROM_RECROP:left_hand_roi_from_recrop"
+  output_stream: "HAND_TRACKING_ROI:left_hand_tracking_roi"
+}
+
+# Extracts right-hand-related landmarks from the pose landmarks.
+node {
+  calculator: "SplitNormalizedLandmarkListCalculator"
+  input_stream: "pose_landmarks"
+  output_stream: "right_hand_landmarks_from_pose"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 16 end: 17 }
+      ranges: { begin: 18 end: 19 }
+      ranges: { begin: 20 end: 21 }
+      combine_outputs: true
+    }
+  }
+}
+
+# Predicts right hand landmarks.
+node {
+  calculator: "HandLandmarksFromPoseGpu"
+  input_stream: "IMAGE:input_video"
+  input_stream: "HAND_LANDMARKS_FROM_POSE:right_hand_landmarks_from_pose"
+  output_stream: "HAND_LANDMARKS:right_hand_landmarks"
+  # Debug outputs.
+  output_stream: "HAND_ROI_FROM_POSE:right_hand_roi_from_pose"
+  output_stream: "HAND_ROI_FROM_RECROP:right_hand_roi_from_recrop"
+  output_stream: "HAND_TRACKING_ROI:right_hand_tracking_roi"
+}
diff --git a/mediapipe/modules/holistic_landmark/hand_landmarks_to_roi.pbtxt b/mediapipe/modules/holistic_landmark/hand_landmarks_to_roi.pbtxt
new file mode 100644
index 0000000..b874c1d
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/hand_landmarks_to_roi.pbtxt
@@ -0,0 +1,57 @@
+# Converts hand landmarks to ROI.
+
+type: "HandLandmarksToRoi"
+
+# Hand landmarks. (NormalizedLandmarkList)
+input_stream: "LANDMARKS:hand_landmarks"
+# Image size (width & height). (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# ROI according to the hand landmarks. (NormalizedRect)
+output_stream: "ROI:roi"
+
+# Gets hand palm landmarks.
+node {
+  calculator: "SplitNormalizedLandmarkListCalculator"
+  input_stream: "hand_landmarks"
+  output_stream: "palm_landmarks"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 0 end: 4 }
+      ranges: { begin: 5 end: 7 }
+      ranges: { begin: 9 end: 11 }
+      ranges: { begin: 13 end: 15 }
+      ranges: { begin: 17 end: 19 }
+      combine_outputs: true
+    }
+  }
+}
+
+# Converts the hand landmarks into a rectangle (normalized by image size)
+# that encloses the hand. The calculator uses a subset of all hand landmarks
+# extracted from SplitNormalizedLandmarkListCalculator above to
+# calculate the bounding box and the rotation of the output rectangle. Please
+# see the comments in the calculator for more detail.
+node {
+  calculator: "HandLandmarksToRectCalculator"
+  input_stream: "NORM_LANDMARKS:palm_landmarks"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "NORM_RECT:palm_landmarks_rect"
+}
+
+# Expands the hand rectangle so that it's likely to contain the hand even with
+# some motion.
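+# Roughly (the exact order of operations is defined by
+# RectTransformationCalculator): scale_x/scale_y of 2.0 double both sides of
+# the palm rectangle, shift_y of -0.1 slides its center by 10% of the
+# rectangle height along the rotated y-axis (towards the fingers), and
+# square_long makes the result square on its longer side.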
+node { + calculator: "RectTransformationCalculator" + input_stream: "NORM_RECT:palm_landmarks_rect" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "roi" + options: { + [mediapipe.RectTransformationCalculatorOptions.ext] { + scale_x: 2.0 + scale_y: 2.0 + shift_y: -0.1 + square_long: true + } + } +} diff --git a/mediapipe/modules/holistic_landmark/hand_recrop.tflite b/mediapipe/modules/holistic_landmark/hand_recrop.tflite new file mode 100755 index 0000000..dcfd276 Binary files /dev/null and b/mediapipe/modules/holistic_landmark/hand_recrop.tflite differ diff --git a/mediapipe/modules/holistic_landmark/hand_recrop_by_roi_cpu.pbtxt b/mediapipe/modules/holistic_landmark/hand_recrop_by_roi_cpu.pbtxt new file mode 100644 index 0000000..75141d2 --- /dev/null +++ b/mediapipe/modules/holistic_landmark/hand_recrop_by_roi_cpu.pbtxt @@ -0,0 +1,137 @@ +# Predicts more accurate hand location (re-crop ROI) within a given ROI. + +type: "HandRecropByRoiCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:input_video" +# ROI (region of interest) within the given image where a palm/hand is located. +# (NormalizedRect) +input_stream: "ROI:roi" + +# Refined (more accurate) ROI to use for hand landmark prediction. +# (NormalizedRect) +output_stream: "HAND_ROI_FROM_RECROP:hand_roi_from_recrop_refined" + +# Transforms hand ROI from the input image to a 256x256 tensor. Preserves aspect +# ratio, which results in a letterbox padding. +node { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE:input_video" + input_stream: "NORM_RECT:roi" + output_stream: "TENSORS:initial_crop_tensor" + output_stream: "LETTERBOX_PADDING:letterbox_padding" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 256 + output_tensor_height: 256 + keep_aspect_ratio: true + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + # For OpenGL origin should be at the top left corner. + gpu_origin: TOP_LEFT, + } + } +} + +# Predicts hand re-crop rectangle. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:initial_crop_tensor" + output_stream: "TENSORS:landmark_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + model_path: "mediapipe/modules/holistic_landmark/hand_recrop.tflite" + delegate { xnnpack {} } + } + } +} + +# Decodes the landmark tensors into a vector of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. Two +# landmarks represent two virtual points: crop and scale of the new crop. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:landmark_tensors" + output_stream: "NORM_LANDMARKS:landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 2 + input_image_width: 256 + input_image_height: 256 + } + } +} + +# Adjusts landmarks (already normalized to [0.f, 1.f]) on the letterboxed hand +# image (after image transformation with the FIT scale mode) to the +# corresponding locations on the same image with the letterbox removed (hand +# image before image transformation). +node { + calculator: "LandmarkLetterboxRemovalCalculator" + input_stream: "LANDMARKS:landmarks" + input_stream: "LETTERBOX_PADDING:letterbox_padding" + output_stream: "LANDMARKS:scaled_landmarks" +} + +# Projects the landmarks from the cropped hand image to the corresponding +# locations on the full image before cropping (input to the graph). 
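+# Per landmark, the projection is expected to look like the following (a
+# sketch assuming the standard LandmarkProjectionCalculator behavior, where
+# (cx, cy), (w, h) and theta are the center, size and rotation of "roi"):
+#
+#   x' = cx + w * ((x - 0.5) * cos(theta) - (y - 0.5) * sin(theta))
+#   y' = cy + h * ((x - 0.5) * sin(theta) + (y - 0.5) * cos(theta))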
+node {
+  calculator: "LandmarkProjectionCalculator"
+  input_stream: "NORM_LANDMARKS:scaled_landmarks"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "NORM_LANDMARKS:alignment_landmarks"
+}
+
+# Converts hand landmarks to a detection that tightly encloses all landmarks.
+node {
+  calculator: "LandmarksToDetectionCalculator"
+  input_stream: "NORM_LANDMARKS:alignment_landmarks"
+  output_stream: "DETECTION:hand_detection"
+}
+
+# Extracts image size from the input images.
+node {
+  calculator: "ImagePropertiesCalculator"
+  input_stream: "IMAGE:input_video"
+  output_stream: "SIZE:image_size"
+}
+
+# Converts hand detection into a rectangle based on center and scale alignment
+# points.
+node {
+  calculator: "AlignmentPointsRectsCalculator"
+  input_stream: "DETECTION:hand_detection"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "NORM_RECT:hand_roi_from_recrop"
+  options: {
+    [mediapipe.DetectionsToRectsCalculatorOptions.ext] {
+      rotation_vector_start_keypoint_index: 0
+      rotation_vector_end_keypoint_index: 1
+      rotation_vector_target_angle_degrees: -90
+    }
+  }
+}
+
+# TODO: revise hand recrop roi calculation.
+# Slightly moves the hand re-crop rectangle from the wrist towards the
+# fingertips. With the new hand cropping logic, the crop border is too close
+# to the fingertips while a lot of space is left below the wrist, so when the
+# hand moves up fast (with fingers pointing up) and the hand rect from the
+# previous frame is used for tracking, the fingertips can be cropped. This
+# adjustment partially solves that, but the hand cropping logic should be
+# reviewed.
+node {
+  calculator: "RectTransformationCalculator"
+  input_stream: "NORM_RECT:hand_roi_from_recrop"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "hand_roi_from_recrop_refined"
+  options: {
+    [mediapipe.RectTransformationCalculatorOptions.ext] {
+      scale_x: 1.0
+      scale_y: 1.0
+      shift_y: -0.1
+      square_long: true
+    }
+  }
+}
diff --git a/mediapipe/modules/holistic_landmark/hand_recrop_by_roi_gpu.pbtxt b/mediapipe/modules/holistic_landmark/hand_recrop_by_roi_gpu.pbtxt
new file mode 100644
index 0000000..4fa8f29
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/hand_recrop_by_roi_gpu.pbtxt
@@ -0,0 +1,136 @@
+# Predicts more accurate hand location (re-crop ROI) within a given ROI.
+
+type: "HandRecropByRoiGpu"
+
+# GPU image. (GpuBuffer)
+input_stream: "IMAGE:input_video"
+# ROI (region of interest) within the given image where a palm/hand is located.
+# (NormalizedRect)
+input_stream: "ROI:roi"
+
+# Refined (more accurate) ROI to use for hand landmark prediction.
+# (NormalizedRect)
+output_stream: "HAND_ROI_FROM_RECROP:hand_roi_from_recrop_refined"
+
+# Transforms hand ROI from the input image to a 256x256 tensor. Preserves aspect
+# ratio, which results in a letterbox padding.
+node {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE_GPU:input_video"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "TENSORS:initial_crop_tensor"
+  output_stream: "LETTERBOX_PADDING:letterbox_padding"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 256
+      output_tensor_height: 256
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: 0.0
+        max: 1.0
+      }
+      # For OpenGL, the origin should be at the top-left corner.
+      gpu_origin: TOP_LEFT,
+    }
+  }
+}
+
+# Predicts hand re-crop rectangle.
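+# Unlike the CPU variant above, no xnnpack delegate is forced here, so the
+# InferenceCalculator falls back to its default backend. If an explicit GPU
+# delegate were desired, it could presumably be requested via the delegate
+# field (a sketch, not part of this graph):
+#
+#   options: {
+#     [mediapipe.InferenceCalculatorOptions.ext] {
+#       model_path: "mediapipe/modules/holistic_landmark/hand_recrop.tflite"
+#       delegate { gpu {} }
+#     }
+#   }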
+node {
+  calculator: "InferenceCalculator"
+  input_stream: "TENSORS:initial_crop_tensor"
+  output_stream: "TENSORS:landmark_tensors"
+  options: {
+    [mediapipe.InferenceCalculatorOptions.ext] {
+      model_path: "mediapipe/modules/holistic_landmark/hand_recrop.tflite"
+    }
+  }
+}
+
+# Decodes the landmark tensors into a vector of landmarks, where the landmark
+# coordinates are normalized by the size of the input image to the model. Two
+# landmarks represent two virtual points: crop and scale of the new crop.
+node {
+  calculator: "TensorsToLandmarksCalculator"
+  input_stream: "TENSORS:landmark_tensors"
+  output_stream: "NORM_LANDMARKS:landmarks"
+  options: {
+    [mediapipe.TensorsToLandmarksCalculatorOptions.ext] {
+      num_landmarks: 2
+      input_image_width: 256
+      input_image_height: 256
+    }
+  }
+}
+
+# Adjusts landmarks (already normalized to [0.f, 1.f]) on the letterboxed hand
+# image (after image transformation with the FIT scale mode) to the
+# corresponding locations on the same image with the letterbox removed (hand
+# image before image transformation).
+node {
+  calculator: "LandmarkLetterboxRemovalCalculator"
+  input_stream: "LANDMARKS:landmarks"
+  input_stream: "LETTERBOX_PADDING:letterbox_padding"
+  output_stream: "LANDMARKS:scaled_landmarks"
+}
+
+# Projects the landmarks from the cropped hand image to the corresponding
+# locations on the full image before cropping (input to the graph).
+node {
+  calculator: "LandmarkProjectionCalculator"
+  input_stream: "NORM_LANDMARKS:scaled_landmarks"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "NORM_LANDMARKS:alignment_landmarks"
+}
+
+# Converts hand landmarks to a detection that tightly encloses all landmarks.
+node {
+  calculator: "LandmarksToDetectionCalculator"
+  input_stream: "NORM_LANDMARKS:alignment_landmarks"
+  output_stream: "DETECTION:hand_detection"
+}
+
+# Extracts image size from the input images.
+node {
+  calculator: "ImagePropertiesCalculator"
+  input_stream: "IMAGE_GPU:input_video"
+  output_stream: "SIZE:image_size"
+}
+
+# Converts hand detection into a rectangle based on center and scale alignment
+# points.
+node {
+  calculator: "AlignmentPointsRectsCalculator"
+  input_stream: "DETECTION:hand_detection"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "NORM_RECT:hand_roi_from_recrop"
+  options: {
+    [mediapipe.DetectionsToRectsCalculatorOptions.ext] {
+      rotation_vector_start_keypoint_index: 0
+      rotation_vector_end_keypoint_index: 1
+      rotation_vector_target_angle_degrees: -90
+    }
+  }
+}
+
+# TODO: revise hand recrop roi calculation.
+# Slightly moves the hand re-crop rectangle from the wrist towards the
+# fingertips. With the new hand cropping logic, the crop border is too close
+# to the fingertips while a lot of space is left below the wrist, so when the
+# hand moves up fast (with fingers pointing up) and the hand rect from the
+# previous frame is used for tracking, the fingertips can be cropped. This
+# adjustment partially solves that, but the hand cropping logic should be
+# reviewed.
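+# Concretely: with scale_x/scale_y of 1.0 the rectangle below keeps its size,
+# and shift_y of -0.1 only slides its center by 10% of the rectangle height
+# along the rotated y-axis, away from the wrist, while square_long keeps the
+# output square.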
+node {
+  calculator: "RectTransformationCalculator"
+  input_stream: "NORM_RECT:hand_roi_from_recrop"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "hand_roi_from_recrop_refined"
+  options: {
+    [mediapipe.RectTransformationCalculatorOptions.ext] {
+      scale_x: 1.0
+      scale_y: 1.0
+      shift_y: -0.1
+      square_long: true
+    }
+  }
+}
diff --git a/mediapipe/modules/holistic_landmark/hand_tracking.pbtxt b/mediapipe/modules/holistic_landmark/hand_tracking.pbtxt
new file mode 100644
index 0000000..07f734e
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/hand_tracking.pbtxt
@@ -0,0 +1,63 @@
+# Decides what ROI to use for hand landmark prediction: either previous frame
+# landmarks ROI or current frame re-crop ROI.
+
+type: "HandTracking"
+
+# Hand landmarks from the current frame. They will be memorized for tracking on
+# the next frame. (NormalizedLandmarkList)
+input_stream: "LANDMARKS:hand_landmarks"
+# Hand re-crop ROI from the current frame. (NormalizedRect)
+input_stream: "HAND_ROI_FROM_RECROP:hand_roi_from_recrop"
+# Image size (width & height). (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# Hand tracking ROI: either the hand landmarks ROI from the previous frame if
+# the hand is still tracked, or the hand re-crop ROI from the current frame
+# otherwise. (NormalizedRect)
+output_stream: "HAND_TRACKING_ROI:hand_tracking_roi"
+
+# Keeps track of hand landmarks from the previous frame.
+node {
+  calculator: "PreviousLoopbackCalculator"
+  # TODO: check that loop works with image size instead of video.
+  input_stream: "MAIN:image_size"
+  input_stream: "LOOP:hand_landmarks"
+  input_stream_info: {
+    tag_index: "LOOP"
+    back_edge: true
+  }
+  output_stream: "PREV_LOOP:prev_hand_landmarks"
+}
+
+# Gets hand landmarks rect.
+node {
+  calculator: "HandLandmarksToRoi"
+  input_stream: "LANDMARKS:prev_hand_landmarks"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "ROI:prev_hand_landmarks_roi"
+}
+
+# Checks that all requirements for tracking are satisfied and, if so, uses the
+# hand rectangle from the previous frame. Otherwise it uses the hand re-crop
+# rectangle from the current frame.
+node {
+  calculator: "RoiTrackingCalculator"
+  input_stream: "PREV_LANDMARKS:prev_hand_landmarks"
+  input_stream: "PREV_LANDMARKS_RECT:prev_hand_landmarks_roi"
+  input_stream: "RECROP_RECT:hand_roi_from_recrop"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "TRACKING_RECT:hand_tracking_roi"
+  options: {
+    [mediapipe.RoiTrackingCalculatorOptions.ext] {
+      rect_requirements: {
+        rotation_degrees: 40.0
+        translation: 0.2
+        # TODO: adjust scale for hand tracking.
+        scale: 0.4
+      }
+      landmarks_requirements: {
+        recrop_rect_margin: -0.1
+      }
+    }
+  }
+}
diff --git a/mediapipe/modules/holistic_landmark/hand_visibility_from_hand_landmarks_from_pose.pbtxt b/mediapipe/modules/holistic_landmark/hand_visibility_from_hand_landmarks_from_pose.pbtxt
new file mode 100644
index 0000000..02db672
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/hand_visibility_from_hand_landmarks_from_pose.pbtxt
@@ -0,0 +1,44 @@
+# Determines hand visibility from the visibility prediction values in the
+# hand-related pose landmarks.
+
+type: "HandVisibilityFromHandLandmarksFromPose"
+
+# Hand-related pose landmarks in [wrist, pinky, index] order.
+# (NormalizedLandmarkList)
+input_stream: "HAND_LANDMARKS_FROM_POSE:hand_landmarks_from_pose"
+
+# Hand visibility to be used as a trigger for hand landmark prediction. (bool)
+output_stream: "VISIBILITY:wrist_visibility"
+
+# Gets pose wrist landmark.
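+# Since HAND_LANDMARKS_FROM_POSE is ordered [wrist, pinky, index], the
+# half-open range { begin: 0 end: 1 } below selects exactly the wrist
+# landmark as a single-element NormalizedLandmarkList.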
+node {
+  calculator: "SplitNormalizedLandmarkListCalculator"
+  input_stream: "hand_landmarks_from_pose"
+  output_stream: "pose_wrist_landmark"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 0 end: 1 }
+    }
+  }
+}
+
+# TODO: use palm landmarks other than the pose wrist.
+# Gets pose wrist visibility.
+node {
+  calculator: "LandmarkVisibilityCalculator"
+  input_stream: "NORM_LANDMARKS:pose_wrist_landmark"
+  output_stream: "VISIBILITY:wrist_visibility_score"
+}
+
+# TODO: ensure the same threshold in rendering.
+# Converts the pose wrist visibility score into a boolean flag.
+node {
+  calculator: "ThresholdingCalculator"
+  input_stream: "FLOAT:wrist_visibility_score"
+  output_stream: "FLAG:wrist_visibility"
+  options: {
+    [mediapipe.ThresholdingCalculatorOptions.ext] {
+      threshold: 0.1
+    }
+  }
+}
diff --git a/mediapipe/modules/holistic_landmark/hand_wrist_for_pose.pbtxt b/mediapipe/modules/holistic_landmark/hand_wrist_for_pose.pbtxt
new file mode 100644
index 0000000..f6551bb
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/hand_wrist_for_pose.pbtxt
@@ -0,0 +1,52 @@
+# Extracts hand wrist landmark to be used instead of pose wrist landmark.
+
+type: "HandWristForPose"
+
+# Hand landmarks to take wrist landmark from. (NormalizedLandmarkList)
+input_stream: "HAND_LANDMARKS:hand_landmarks"
+
+# Hand wrist landmark to replace original pose wrist landmark with updated
+# visibility. (NormalizedLandmarkList)
+output_stream: "WRIST_LANDMARK:hand_wrist_landmark_with_visibility"
+
+# Side packet with a constant visibility score. As the score is the `x` in
+# `sigmoid(x)`, we pick a value big enough not to affect the pose landmarks
+# visibility rendering threshold.
+node {
+  calculator: "ConstantSidePacketCalculator"
+  output_side_packet: "PACKET:0:visible_score_side_packet"
+  options: {
+    [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+      packet { float_value: 100.0 }
+    }
+  }
+}
+
+# Converts the side packet with the visibility score to a stream.
+node {
+  calculator: "SidePacketToStreamCalculator"
+  input_stream: "TICK:hand_landmarks"
+  input_side_packet: "visible_score_side_packet"
+  output_stream: "AT_TICK:visible_score"
+}
+
+# Extracts wrist landmark from the hand landmarks.
+node {
+  calculator: "SplitNormalizedLandmarkListCalculator"
+  input_stream: "hand_landmarks"
+  output_stream: "hand_wrist_landmark"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 0 end: 1 }
+    }
+  }
+}
+
+# Sets the wrist landmark visibility score. If HAND_LANDMARKS is non-empty, the
+# wrist will always be visible.
+node {
+  calculator: "SetLandmarkVisibilityCalculator"
+  input_stream: "NORM_LANDMARKS:hand_wrist_landmark"
+  input_stream: "VISIBILITY:visible_score"
+  output_stream: "NORM_LANDMARKS:hand_wrist_landmark_with_visibility"
+}
diff --git a/mediapipe/modules/holistic_landmark/holistic_landmark_cpu.pbtxt b/mediapipe/modules/holistic_landmark/holistic_landmark_cpu.pbtxt
new file mode 100644
index 0000000..ce86d1d
--- /dev/null
+++ b/mediapipe/modules/holistic_landmark/holistic_landmark_cpu.pbtxt
@@ -0,0 +1,146 @@
+# Predicts pose + left/right hand + face landmarks.
+# +# It is required that: +# - "face_detection_short_range.tflite" is available at +# "mediapipe/modules/face_detection/face_detection_short_range.tflite" +# +# - "face_landmark.tflite" is available at +# "mediapipe/modules/face_landmark/face_landmark.tflite" +# +# - "hand_landmark_full.tflite" is available at +# "mediapipe/modules/hand_landmark/hand_landmark_full.tflite" +# +# - "hand_recrop.tflite" is available at +# "mediapipe/modules/holistic_landmark/hand_recrop.tflite" +# +# - "handedness.txt" is available at +# "mediapipe/modules/hand_landmark/handedness.txt" +# +# - "pose_detection.tflite" is available at +# "mediapipe/modules/pose_detection/pose_detection.tflite" +# +# - "pose_landmark_lite.tflite" or "pose_landmark_full.tflite" or +# "pose_landmark_heavy.tflite" is available at +# "mediapipe/modules/pose_landmark/pose_landmark_lite.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_full.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite" +# path respectively during execution, depending on the specification in the +# MODEL_COMPLEXITY input side packet. +# +# EXAMPLE: +# node { +# calculator: "HolisticLandmarkCpu" +# input_stream: "IMAGE:input_video" +# input_side_packet: "MODEL_COMPLEXITY:model_complexity" +# input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" +# input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" +# input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" +# input_side_packet: "REFINE_FACE_LANDMARKS:refine_face_landmarks" +# input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" +# output_stream: "POSE_LANDMARKS:pose_landmarks" +# output_stream: "FACE_LANDMARKS:face_landmarks" +# output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" +# output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" +# } +# +# NOTE: if a pose/hand/face output is not present in the image, for this +# particular timestamp there will not be an output packet in the corresponding +# output stream below. However, the MediaPipe framework will internally inform +# the downstream calculators of the absence of this packet so that they don't +# wait for it unnecessarily. + +type: "HolisticLandmarkCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:image" + +# Complexity of the pose landmark model: 0, 1 or 2. Landmark accuracy as well as +# inference latency generally go up with the model complexity. If unspecified, +# functions as set to 1. (int) +input_side_packet: "MODEL_COMPLEXITY:model_complexity" + +# Whether to filter landmarks across different input images to reduce jitter. +# If unspecified, functions as set to true. (bool) +input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" + +# Whether to predict the segmentation mask. If unspecified, functions as set to +# false. (bool) +input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + +# Whether to filter segmentation mask across different input images to reduce +# jitter. If unspecified, functions as set to true. (bool) +input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" + +# Whether to run the face landmark model with attention on lips and eyes to +# provide more accuracy, and additionally output iris landmarks. If unspecified, +# functions as set to false. (bool) +input_side_packet: "REFINE_FACE_LANDMARKS:refine_face_landmarks" + +# Whether landmarks on the previous image should be used to help localize +# landmarks on the current image. (bool) +input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" + +# Pose landmarks. 
(NormalizedLandmarkList) +# 33 pose landmarks. +output_stream: "POSE_LANDMARKS:pose_landmarks" +# 33 pose world landmarks. (LandmarkList) +output_stream: "WORLD_LANDMARKS:pose_world_landmarks" +# 21 left hand landmarks. (NormalizedLandmarkList) +output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" +# 21 right hand landmarks. (NormalizedLandmarkList) +output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" +# 468 face landmarks. (NormalizedLandmarkList) +output_stream: "FACE_LANDMARKS:face_landmarks" + +# Segmentation mask. (ImageFrame in ImageFormat::VEC32F1) +output_stream: "SEGMENTATION_MASK:segmentation_mask" + +# Debug outputs +output_stream: "POSE_ROI:pose_landmarks_roi" +output_stream: "POSE_DETECTION:pose_detection" + +# Predicts pose landmarks. +node { + calculator: "PoseLandmarkCpu" + input_stream: "IMAGE:image" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" + input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" + input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" + output_stream: "LANDMARKS:pose_landmarks" + output_stream: "WORLD_LANDMARKS:pose_world_landmarks" + output_stream: "SEGMENTATION_MASK:segmentation_mask" + output_stream: "ROI_FROM_LANDMARKS:pose_landmarks_roi" + output_stream: "DETECTION:pose_detection" +} + +# Predicts left and right hand landmarks based on the initial pose landmarks. +node { + calculator: "HandLandmarksLeftAndRightCpu" + input_stream: "IMAGE:image" + input_stream: "POSE_LANDMARKS:pose_landmarks" + output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" + output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" +} + +# Extracts face-related pose landmarks. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "face_landmarks_from_pose" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 11 } + } + } +} + +# Predicts face landmarks based on the initial pose landmarks. +node { + calculator: "FaceLandmarksFromPoseCpu" + input_stream: "IMAGE:image" + input_stream: "FACE_LANDMARKS_FROM_POSE:face_landmarks_from_pose" + input_side_packet: "REFINE_LANDMARKS:refine_face_landmarks" + output_stream: "FACE_LANDMARKS:face_landmarks" +} diff --git a/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt b/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt new file mode 100644 index 0000000..33ed880 --- /dev/null +++ b/mediapipe/modules/holistic_landmark/holistic_landmark_gpu.pbtxt @@ -0,0 +1,146 @@ +# Predicts pose + left/right hand + face landmarks. 
+# +# It is required that: +# - "face_detection_short_range.tflite" is available at +# "mediapipe/modules/face_detection/face_detection_short_range.tflite" +# +# - "face_landmark.tflite" is available at +# "mediapipe/modules/face_landmark/face_landmark.tflite" +# +# - "hand_landmark_full.tflite" is available at +# "mediapipe/modules/hand_landmark/hand_landmark_full.tflite" +# +# - "hand_recrop.tflite" is available at +# "mediapipe/modules/holistic_landmark/hand_recrop.tflite" +# +# - "handedness.txt" is available at +# "mediapipe/modules/hand_landmark/handedness.txt" +# +# - "pose_detection.tflite" is available at +# "mediapipe/modules/pose_detection/pose_detection.tflite" +# +# - "pose_landmark_lite.tflite" or "pose_landmark_full.tflite" or +# "pose_landmark_heavy.tflite" is available at +# "mediapipe/modules/pose_landmark/pose_landmark_lite.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_full.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite" +# path respectively during execution, depending on the specification in the +# MODEL_COMPLEXITY input side packet. +# +# EXAMPLE: +# node { +# calculator: "HolisticLandmarkGpu" +# input_stream: "IMAGE:input_video" +# input_side_packet: "MODEL_COMPLEXITY:model_complexity" +# input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" +# input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" +# input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" +# input_side_packet: "REFINE_FACE_LANDMARKS:refine_face_landmarks" +# input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" +# output_stream: "POSE_LANDMARKS:pose_landmarks" +# output_stream: "FACE_LANDMARKS:face_landmarks" +# output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" +# output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" +# } +# +# NOTE: if a pose/hand/face output is not present in the image, for this +# particular timestamp there will not be an output packet in the corresponding +# output stream below. However, the MediaPipe framework will internally inform +# the downstream calculators of the absence of this packet so that they don't +# wait for it unnecessarily. + +type: "HolisticLandmarkGpu" + +# GPU image. (GpuBuffer) +input_stream: "IMAGE:image" + +# Complexity of the pose landmark model: 0, 1 or 2. Landmark accuracy as well as +# inference latency generally go up with the model complexity. If unspecified, +# functions as set to 1. (int) +input_side_packet: "MODEL_COMPLEXITY:model_complexity" + +# Whether to filter landmarks across different input images to reduce jitter. +# If unspecified, functions as set to true. (bool) +input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" + +# Whether to predict the segmentation mask. If unspecified, functions as set to +# false. (bool) +input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + +# Whether to filter segmentation mask across different input images to reduce +# jitter. If unspecified, functions as set to true. (bool) +input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" + +# Whether to run the face landmark model with attention on lips and eyes to +# provide more accuracy, and additionally output iris landmarks. If unspecified, +# functions as set to false. (bool) +input_side_packet: "REFINE_FACE_LANDMARKS:refine_face_landmarks" + +# Whether landmarks on the previous image should be used to help localize +# landmarks on the current image. (bool) +input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" + +# Pose landmarks. 
(NormalizedLandmarkList) +# 33 pose landmarks. +output_stream: "POSE_LANDMARKS:pose_landmarks" +# 33 pose world landmarks. (LandmarkList) +output_stream: "WORLD_LANDMARKS:pose_world_landmarks" +# 21 left hand landmarks. (NormalizedLandmarkList) +output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" +# 21 right hand landmarks. (NormalizedLandmarkList) +output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" +# 468 face landmarks. (NormalizedLandmarkList) +output_stream: "FACE_LANDMARKS:face_landmarks" + +# Segmentation mask. (GpuBuffer in RGBA, with the same mask values in R and A) +output_stream: "SEGMENTATION_MASK:segmentation_mask" + +# Debug outputs +output_stream: "POSE_ROI:pose_landmarks_roi" +output_stream: "POSE_DETECTION:pose_detection" + +# Predicts pose landmarks. +node { + calculator: "PoseLandmarkGpu" + input_stream: "IMAGE:image" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" + input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" + input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" + output_stream: "LANDMARKS:pose_landmarks" + output_stream: "WORLD_LANDMARKS:pose_world_landmarks" + output_stream: "SEGMENTATION_MASK:segmentation_mask" + output_stream: "ROI_FROM_LANDMARKS:pose_landmarks_roi" + output_stream: "DETECTION:pose_detection" +} + +# Predicts left and right hand landmarks based on the initial pose landmarks. +node { + calculator: "HandLandmarksLeftAndRightGpu" + input_stream: "IMAGE:image" + input_stream: "POSE_LANDMARKS:pose_landmarks" + output_stream: "LEFT_HAND_LANDMARKS:left_hand_landmarks" + output_stream: "RIGHT_HAND_LANDMARKS:right_hand_landmarks" +} + +# Extracts face-related pose landmarks. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "pose_landmarks" + output_stream: "face_landmarks_from_pose" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 11 } + } + } +} + +# Predicts face landmarks based on the initial pose landmarks. +node { + calculator: "FaceLandmarksFromPoseGpu" + input_stream: "IMAGE:image" + input_stream: "FACE_LANDMARKS_FROM_POSE:face_landmarks_from_pose" + input_side_packet: "REFINE_LANDMARKS:refine_face_landmarks" + output_stream: "FACE_LANDMARKS:face_landmarks" +} diff --git a/mediapipe/modules/iris_landmark/BUILD b/mediapipe/modules/iris_landmark/BUILD new file mode 100644 index 0000000..e16a79b --- /dev/null +++ b/mediapipe/modules/iris_landmark/BUILD @@ -0,0 +1,103 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "iris_landmark_cpu", + graph = "iris_landmark_cpu.pbtxt", + register_as = "IrisLandmarkCpu", + deps = [ + "//mediapipe/calculators/core:clip_vector_size_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_cropping_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/tflite:tflite_converter_calculator", + "//mediapipe/calculators/tflite:tflite_inference_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_floats_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_landmarks_calculator", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "iris_landmark_gpu", + graph = "iris_landmark_gpu.pbtxt", + register_as = "IrisLandmarkGpu", + deps = [ + "//mediapipe/calculators/core:clip_vector_size_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_cropping_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/tflite:tflite_converter_calculator", + "//mediapipe/calculators/tflite:tflite_inference_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_floats_calculator", + "//mediapipe/calculators/tflite:tflite_tensors_to_landmarks_calculator", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "iris_landmark_left_and_right_gpu", + graph = "iris_landmark_left_and_right_gpu.pbtxt", + register_as = "IrisLandmarkLeftAndRightGpu", + deps = [ + ":iris_landmark_gpu", + ":iris_landmark_landmarks_to_roi", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:side_packet_to_stream_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "iris_landmark_left_and_right_cpu", + graph = "iris_landmark_left_and_right_cpu.pbtxt", + register_as = "IrisLandmarkLeftAndRightCpu", + deps = [ + ":iris_landmark_cpu", + ":iris_landmark_landmarks_to_roi", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:side_packet_to_stream_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + ], +) + +exports_files( + srcs = [ + "iris_landmark.tflite", + ], +) + +mediapipe_simple_subgraph( + name = "iris_landmark_landmarks_to_roi", + graph = "iris_landmark_landmarks_to_roi.pbtxt", + register_as = "IrisLandmarkLandmarksToRoi", + deps = [ + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) diff --git a/mediapipe/modules/iris_landmark/README.md b/mediapipe/modules/iris_landmark/README.md new file mode 100644 index 0000000..f99fcee --- /dev/null +++ b/mediapipe/modules/iris_landmark/README.md @@ -0,0 +1,8 @@ +# iris_landmark + +Subgraphs|Details +:--- | :--- 
+[`IrisLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_cpu.pbtxt)| Detects iris landmarks for the left or right eye. (CPU input, and inference is executed on CPU.)
+[`IrisLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_gpu.pbtxt)| Detects iris landmarks for the left or right eye. (GPU input, and inference is executed on GPU.)
+[`IrisLandmarkLeftAndRightCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_cpu.pbtxt)| Detects iris landmarks for both left and right eyes. (CPU input, and inference is executed on CPU.)
+[`IrisLandmarkLeftAndRightGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_gpu.pbtxt)| Detects iris landmarks for both left and right eyes. (GPU input, and inference is executed on GPU.)
diff --git a/mediapipe/modules/iris_landmark/iris_landmark.tflite b/mediapipe/modules/iris_landmark/iris_landmark.tflite
new file mode 100755
index 0000000..974b910
Binary files /dev/null and b/mediapipe/modules/iris_landmark/iris_landmark.tflite differ
diff --git a/mediapipe/modules/iris_landmark/iris_landmark_cpu.pbtxt b/mediapipe/modules/iris_landmark/iris_landmark_cpu.pbtxt
new file mode 100644
index 0000000..f2c4b04
--- /dev/null
+++ b/mediapipe/modules/iris_landmark/iris_landmark_cpu.pbtxt
@@ -0,0 +1,156 @@
+# MediaPipe subgraph to calculate iris landmarks and eye contour landmarks for
+# a single eye. (CPU input, and inference is executed on CPU.)
+#
+# It is required that "iris_landmark.tflite" is available at
+# "mediapipe/modules/iris_landmark/iris_landmark.tflite"
+# path during execution.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "IrisLandmarkCpu"
+#     input_stream: "IMAGE:image"
+#     input_stream: "ROI:eye_roi"
+#     input_stream: "IS_RIGHT_EYE:is_right_eye"
+#     output_stream: "EYE_CONTOUR_LANDMARKS:eye_contour_landmarks"
+#     output_stream: "IRIS_LANDMARKS:iris_landmarks"
+#   }

+type: "IrisLandmarkCpu"
+
+# CPU image. (ImageFrame)
+input_stream: "IMAGE:image"
+# ROI (region of interest) within the given image where an eye is located.
+# (NormalizedRect)
+input_stream: "ROI:roi"
+# Is right eye. (bool)
+# (Model is trained to detect left eye landmarks only, hence for the right eye,
+# flipping is required to imitate the left eye.)
+input_stream: "IS_RIGHT_EYE:is_right_eye"
+
+# 71 refined normalized eye contour landmarks. (NormalizedLandmarkList)
+output_stream: "EYE_CONTOUR_LANDMARKS:projected_eye_landmarks"
+# 5 normalized iris landmarks. (NormalizedLandmarkList)
+output_stream: "IRIS_LANDMARKS:projected_iris_landmarks"
+
+node {
+  calculator: "ImageCroppingCalculator"
+  input_stream: "IMAGE:image"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "IMAGE:eye_image"
+  options: {
+    [mediapipe.ImageCroppingCalculatorOptions.ext] {
+      border_mode: BORDER_REPLICATE
+    }
+  }
+}
+
+node {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE:eye_image"
+  input_stream: "FLIP_HORIZONTALLY:is_right_eye"
+  output_stream: "IMAGE:transformed_eye_image"
+  output_stream: "LETTERBOX_PADDING:eye_letterbox_padding"
+  options: {
+    [mediapipe.ImageTransformationCalculatorOptions.ext] {
+      output_width: 64
+      output_height: 64
+      scale_mode: FIT
+    }
+  }
+}
+
+# Converts the transformed input image on CPU into an image tensor stored as a
+# TfLiteTensor.
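+# With zero_center set to false, the converter is expected to map 8-bit pixel
+# values to floats in [0, 1] (rather than the zero-centered [-1, 1] range),
+# yielding a 64x64x3 float tensor for the 64x64 eye crop produced above.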
+node { + calculator: "TfLiteConverterCalculator" + input_stream: "IMAGE:transformed_eye_image" + output_stream: "TENSORS:image_tensor" + options: { + [mediapipe.TfLiteConverterCalculatorOptions.ext] { + zero_center: false + } + } +} + +# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "TfLiteInferenceCalculator" + input_stream: "TENSORS:image_tensor" + output_stream: "TENSORS:output_tensors" + options: { + [mediapipe.TfLiteInferenceCalculatorOptions.ext] { + model_path: "mediapipe/modules/iris_landmark/iris_landmark.tflite" + delegate { xnnpack {} } + } + } +} + +# Splits a vector of TFLite tensors to multiple vectors according to the ranges +# specified in option. +node { + calculator: "SplitTfLiteTensorVectorCalculator" + input_stream: "output_tensors" + output_stream: "eye_landmarks_tensor" + output_stream: "iris_landmarks_tensor" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 1 } + ranges: { begin: 1 end: 2 } + } + } +} + +# Decodes the landmark tensors into a vector of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. +node { + calculator: "TfLiteTensorsToLandmarksCalculator" + input_stream: "TENSORS:iris_landmarks_tensor" + input_stream: "FLIP_HORIZONTALLY:is_right_eye" + output_stream: "NORM_LANDMARKS:iris_landmarks" + options: { + [mediapipe.TfLiteTensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 5 + input_image_width: 64 + input_image_height: 64 + } + } +} + +# Decodes the landmark tensors into a vector of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. +node { + calculator: "TfLiteTensorsToLandmarksCalculator" + input_stream: "TENSORS:eye_landmarks_tensor" + input_stream: "FLIP_HORIZONTALLY:is_right_eye" + output_stream: "NORM_LANDMARKS:eye_landmarks" + options: { + [mediapipe.TfLiteTensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 71 + input_image_width: 64 + input_image_height: 64 + } + } +} + +node { + calculator: "LandmarkLetterboxRemovalCalculator" + input_stream: "LANDMARKS:0:iris_landmarks" + input_stream: "LANDMARKS:1:eye_landmarks" + input_stream: "LETTERBOX_PADDING:eye_letterbox_padding" + output_stream: "LANDMARKS:0:padded_iris_landmarks" + output_stream: "LANDMARKS:1:padded_eye_landmarks" +} + +# Projects the landmarks from the cropped face image to the corresponding +# locations on the full image before cropping (input to the graph). +node { + calculator: "LandmarkProjectionCalculator" + input_stream: "NORM_LANDMARKS:0:padded_iris_landmarks" + input_stream: "NORM_LANDMARKS:1:padded_eye_landmarks" + input_stream: "NORM_RECT:roi" + output_stream: "NORM_LANDMARKS:0:projected_iris_landmarks" + output_stream: "NORM_LANDMARKS:1:projected_eye_landmarks" +} + diff --git a/mediapipe/modules/iris_landmark/iris_landmark_gpu.pbtxt b/mediapipe/modules/iris_landmark/iris_landmark_gpu.pbtxt new file mode 100644 index 0000000..9fb7898 --- /dev/null +++ b/mediapipe/modules/iris_landmark/iris_landmark_gpu.pbtxt @@ -0,0 +1,162 @@ +# MediaPipe subgraph to calculate iris landmarks and eye contour landmarks for +# a single eye. (GPU input, and inference is executed on GPU.) +# +# It is required that "iris_landmark.tflite" is available at +# "mediapipe/modules/iris_landmark/iris_landmark.tflite" +# path during execution. 
+#
+# EXAMPLE:
+#   node {
+#     calculator: "IrisLandmarkGpu"
+#     input_stream: "IMAGE:image"
+#     input_stream: "ROI:eye_roi"
+#     input_stream: "IS_RIGHT_EYE:is_right_eye"
+#     output_stream: "EYE_CONTOUR_LANDMARKS:eye_contour_landmarks"
+#     output_stream: "IRIS_LANDMARKS:iris_landmarks"
+#   }

+type: "IrisLandmarkGpu"
+
+# GPU buffer. (GpuBuffer)
+input_stream: "IMAGE:image"
+# ROI (region of interest) within the given image where an eye is located.
+# (NormalizedRect)
+input_stream: "ROI:roi"
+# Is right eye. (bool)
+# (Model is trained to detect left eye landmarks only, hence for the right eye,
+# flipping is required to imitate the left eye.)
+input_stream: "IS_RIGHT_EYE:is_right_eye"
+
+# TfLite model to detect iris landmarks.
+# (std::unique_ptr)
+# NOTE: currently, only the
+# mediapipe/modules/iris_landmark/iris_landmark.tflite model can be passed
+# here; otherwise the results are undefined.
+input_side_packet: "MODEL:model"
+
+# 71 refined normalized eye contour landmarks. (NormalizedLandmarkList)
+output_stream: "EYE_CONTOUR_LANDMARKS:projected_eye_landmarks"
+# 5 normalized iris landmarks. (NormalizedLandmarkList)
+output_stream: "IRIS_LANDMARKS:projected_iris_landmarks"
+
+node {
+  calculator: "ImageCroppingCalculator"
+  input_stream: "IMAGE_GPU:image"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "IMAGE_GPU:eye_image"
+  options: {
+    [mediapipe.ImageCroppingCalculatorOptions.ext] {
+      border_mode: BORDER_REPLICATE
+    }
+  }
+}
+
+node {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE_GPU:eye_image"
+  input_stream: "FLIP_HORIZONTALLY:is_right_eye"
+  output_stream: "IMAGE_GPU:transformed_eye_image"
+  output_stream: "LETTERBOX_PADDING:eye_letterbox_padding"
+  options: {
+    [mediapipe.ImageTransformationCalculatorOptions.ext] {
+      output_width: 64
+      output_height: 64
+      scale_mode: FIT
+    }
+  }
+}
+
+# Converts the transformed input image on GPU into an image tensor stored as a
+# TfLiteTensor.
+node {
+  calculator: "TfLiteConverterCalculator"
+  input_stream: "IMAGE_GPU:transformed_eye_image"
+  output_stream: "TENSORS_GPU:image_tensor"
+  options: {
+    [mediapipe.TfLiteConverterCalculatorOptions.ext] {
+      zero_center: false
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "TfLiteInferenceCalculator"
+  input_stream: "TENSORS_GPU:image_tensor"
+  output_stream: "TENSORS:output_tensors"
+  options: {
+    [mediapipe.TfLiteInferenceCalculatorOptions.ext] {
+      model_path: "mediapipe/modules/iris_landmark/iris_landmark.tflite"
+    }
+  }
+}
+
+# Splits a vector of TFLite tensors into multiple vectors according to the
+# ranges specified in the options.
+node {
+  calculator: "SplitTfLiteTensorVectorCalculator"
+  input_stream: "output_tensors"
+  output_stream: "eye_landmarks_tensor"
+  output_stream: "iris_landmarks_tensor"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 0 end: 1 }
+      ranges: { begin: 1 end: 2 }
+    }
+  }
+}
+
+# Decodes the landmark tensors into a vector of landmarks, where the landmark
+# coordinates are normalized by the size of the input image to the model.
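+# For example, with input_image_width/height of 64, a raw landmark at pixel
+# coordinates (32, 16) is expected to decode to normalized (0.5, 0.25), and
+# FLIP_HORIZONTALLY then mirrors x as 1 - x for right-eye inputs (assumed
+# calculator behavior).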
+node {
+  calculator: "TfLiteTensorsToLandmarksCalculator"
+  input_stream: "TENSORS:iris_landmarks_tensor"
+  input_stream: "FLIP_HORIZONTALLY:is_right_eye"
+  output_stream: "NORM_LANDMARKS:iris_landmarks"
+  options: {
+    [mediapipe.TfLiteTensorsToLandmarksCalculatorOptions.ext] {
+      num_landmarks: 5
+      input_image_width: 64
+      input_image_height: 64
+    }
+  }
+}
+
+# Decodes the landmark tensors into a vector of landmarks, where the landmark
+# coordinates are normalized by the size of the input image to the model.
+node {
+  calculator: "TfLiteTensorsToLandmarksCalculator"
+  input_stream: "TENSORS:eye_landmarks_tensor"
+  input_stream: "FLIP_HORIZONTALLY:is_right_eye"
+  output_stream: "NORM_LANDMARKS:eye_landmarks"
+  options: {
+    [mediapipe.TfLiteTensorsToLandmarksCalculatorOptions.ext] {
+      num_landmarks: 71
+      input_image_width: 64
+      input_image_height: 64
+    }
+  }
+}
+
+node {
+  calculator: "LandmarkLetterboxRemovalCalculator"
+  input_stream: "LANDMARKS:0:iris_landmarks"
+  input_stream: "LANDMARKS:1:eye_landmarks"
+  input_stream: "LETTERBOX_PADDING:eye_letterbox_padding"
+  output_stream: "LANDMARKS:0:padded_iris_landmarks"
+  output_stream: "LANDMARKS:1:padded_eye_landmarks"
+}
+
+# Projects the landmarks from the cropped face image to the corresponding
+# locations on the full image before cropping (input to the graph).
+node {
+  calculator: "LandmarkProjectionCalculator"
+  input_stream: "NORM_LANDMARKS:0:padded_iris_landmarks"
+  input_stream: "NORM_LANDMARKS:1:padded_eye_landmarks"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "NORM_LANDMARKS:0:projected_iris_landmarks"
+  output_stream: "NORM_LANDMARKS:1:projected_eye_landmarks"
+}
+
diff --git a/mediapipe/modules/iris_landmark/iris_landmark_landmarks_to_roi.pbtxt b/mediapipe/modules/iris_landmark/iris_landmark_landmarks_to_roi.pbtxt
new file mode 100644
index 0000000..fc53a16
--- /dev/null
+++ b/mediapipe/modules/iris_landmark/iris_landmark_landmarks_to_roi.pbtxt
@@ -0,0 +1,50 @@
+# MediaPipe subgraph to calculate the region of interest (ROI) which can then
+# be used to calculate iris landmarks and eye contour landmarks.
+#
+# NOTE: this graph is subject to change and should not be used directly.
+
+type: "IrisLandmarkLandmarksToRoi"
+
+# List of two normalized landmarks: left and right corners of an eye.
+# (NormalizedLandmarkList)
+input_stream: "LANDMARKS:landmarks"
+# Image size. (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# ROI (region of interest) within the given image where an eye is located.
+# (NormalizedRect) +output_stream: "ROI:roi" + +node { + calculator: "LandmarksToDetectionCalculator" + input_stream: "NORM_LANDMARKS:landmarks" + output_stream: "DETECTION:detection" +} + +node { + calculator: "DetectionsToRectsCalculator" + input_stream: "DETECTION:detection" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "NORM_RECT:raw_roi" + options: { + [mediapipe.DetectionsToRectsCalculatorOptions.ext] { + rotation_vector_start_keypoint_index: 0 + rotation_vector_end_keypoint_index: 1 + rotation_vector_target_angle_degrees: 0 + } + } +} + +node { + calculator: "RectTransformationCalculator" + input_stream: "NORM_RECT:raw_roi" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "roi" + options: { + [mediapipe.RectTransformationCalculatorOptions.ext] { + scale_x: 2.3 + scale_y: 2.3 + square_long: true + } + } +} diff --git a/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_cpu.pbtxt b/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_cpu.pbtxt new file mode 100644 index 0000000..7fb72de --- /dev/null +++ b/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_cpu.pbtxt @@ -0,0 +1,120 @@ +# MediaPipe subgraph to calculate iris landmarks and eye contour landmarks for +# two eyes: left and right. (CPU input, and inference is executed on CPU.) +# +# It is required that "iris_landmark.tflite" is available at +# "mediapipe/modules/iris_landmark/iris_landmark.tflite" +# path during execution. +# +# EXAMPLE: +# node { +# calculator: "IrisLandmarkLeftAndRightCpu" +# input_stream: "IMAGE:image" +# input_stream: "LEFT_EYE_BOUNDARY_LANDMARKS:left_eye_boundary_landmarks" +# input_stream: "RIGHT_EYE_BOUNDARY_LANDMARKS:right_eye_boundary_landmarks" +# output_stream: "LEFT_EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks" +# output_stream: "LEFT_EYE_IRIS_LANDMARKS:left_iris_landmarks" +# output_stream: "RIGHT_EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks" +# output_stream: "RIGHT_EYE_IRIS_LANDMARKS:right_iris_landmarks" +# } + +type: "IrisLandmarkLeftAndRightCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:image" +# List of two landmarks defining LEFT eye boundaries - left and right corners. +# (NormalizedLandmarkList) +input_stream: "LEFT_EYE_BOUNDARY_LANDMARKS:left_eye_boundary_landmarks" +# List of two landmarks defining RIGHT eye boundaries - left and right corners. +# (NormalizedLandmarkList) +input_stream: "RIGHT_EYE_BOUNDARY_LANDMARKS:right_eye_boundary_landmarks" + +# 71 normalized eye contour landmarks. (NormalizedLandmarkList) +output_stream: "LEFT_EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks" +# 5 normalized iris landmarks. (NormalizedLandmarkList) +output_stream: "LEFT_EYE_IRIS_LANDMARKS:left_iris_landmarks" +# Region of interest used to do calculations for the left eye. (NormalizedRect) +output_stream: "LEFT_EYE_ROI:left_eye_roi" + +# 71 normalized eye contour landmarks. (NormalizedLandmarkList) +output_stream: "RIGHT_EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks" +# 5 normalized iris landmarks. (NormalizedLandmarkList) +output_stream: "RIGHT_EYE_IRIS_LANDMARKS:right_iris_landmarks" +# Region of interest used to do calculations for the right eye. 
(NormalizedRect) +output_stream: "RIGHT_EYE_ROI:right_eye_roi" + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:image" + output_stream: "SIZE:image_size" +} + +### Processing left eye ### + +node { + calculator: "IrisLandmarkLandmarksToRoi" + input_stream: "LANDMARKS:left_eye_boundary_landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:left_eye_roi" +} + +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:left_eye_flag_side_packet" + options { + [mediapipe.ConstantSidePacketCalculatorOptions.ext] { + packet { bool_value: false } + } + } +} + +node { + calculator: "SidePacketToStreamCalculator" + input_stream: "TICK:image" + input_side_packet: "left_eye_flag_side_packet" + output_stream: "AT_TICK:left_eye_flag" +} + +node { + calculator: "IrisLandmarkCpu" + input_stream: "IMAGE:image" + input_stream: "ROI:left_eye_roi" + input_stream: "IS_RIGHT_EYE:left_eye_flag" + output_stream: "EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks" + output_stream: "IRIS_LANDMARKS:left_iris_landmarks" +} + +### Processing right eye ### + +node { + calculator: "IrisLandmarkLandmarksToRoi" + input_stream: "LANDMARKS:right_eye_boundary_landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:right_eye_roi" +} + +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:right_eye_flag_side_packet" + options { + [mediapipe.ConstantSidePacketCalculatorOptions.ext] { + packet { bool_value: true } + } + } +} + +node { + calculator: "SidePacketToStreamCalculator" + input_stream: "TICK:image" + input_side_packet: "right_eye_flag_side_packet" + output_stream: "AT_TICK:right_eye_flag" +} + +node { + calculator: "IrisLandmarkCpu" + input_stream: "IMAGE:image" + input_stream: "ROI:right_eye_roi" + input_stream: "IS_RIGHT_EYE:right_eye_flag" + output_stream: "EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks" + output_stream: "IRIS_LANDMARKS:right_iris_landmarks" +} + diff --git a/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_gpu.pbtxt b/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_gpu.pbtxt new file mode 100644 index 0000000..eeff026 --- /dev/null +++ b/mediapipe/modules/iris_landmark/iris_landmark_left_and_right_gpu.pbtxt @@ -0,0 +1,120 @@ +# MediaPipe subgraph to calculate iris landmarks and eye contour landmarks for +# two eyes: left and right. (GPU input, and inference is executed on GPU.) +# +# It is required that "iris_landmark.tflite" is available at +# "mediapipe/modules/iris_landmark/iris_landmark.tflite" +# path during execution. +# +# EXAMPLE: +# node { +# calculator: "IrisLandmarkLeftAndRightGpu" +# input_stream: "IMAGE:image" +# input_stream: "LEFT_EYE_BOUNDARY_LANDMARKS:left_eye_boundary_landmarks" +# input_stream: "RIGHT_EYE_BOUNDARY_LANDMARKS:right_eye_boundary_landmarks" +# output_stream: "LEFT_EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks" +# output_stream: "LEFT_EYE_IRIS_LANDMARKS:left_iris_landmarks" +# output_stream: "RIGHT_EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks" +# output_stream: "RIGHT_EYE_IRIS_LANDMARKS:right_iris_landmarks" +# } + +type: "IrisLandmarkLeftAndRightGpu" + +# GPU buffer. (GpuBuffer) +input_stream: "IMAGE:image" +# List of two landmarks defining LEFT eye boundaries - left and right corners. +# (NormalizedLandmarkList) +input_stream: "LEFT_EYE_BOUNDARY_LANDMARKS:left_eye_boundary_landmarks" +# List of two landmarks defining RIGHT eye boundaries - left and right corners. 
+# (NormalizedLandmarkList) +input_stream: "RIGHT_EYE_BOUNDARY_LANDMARKS:right_eye_boundary_landmarks" + +# 71 normalized eye contour landmarks. (NormalizedLandmarkList) +output_stream: "LEFT_EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks" +# 5 normalized iris landmarks. (NormalizedLandmarkList) +output_stream: "LEFT_EYE_IRIS_LANDMARKS:left_iris_landmarks" +# Region of interest used to do calculations for the left eye. (NormalizedRect) +output_stream: "LEFT_EYE_ROI:left_eye_roi" + +# 71 normalized eye contour landmarks. (NormalizedLandmarkList) +output_stream: "RIGHT_EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks" +# 5 normalized iris landmarks. (NormalizedLandmarkList) +output_stream: "RIGHT_EYE_IRIS_LANDMARKS:right_iris_landmarks" +# Region of interest used to do calculations for the right eye. (NormalizedRect) +output_stream: "RIGHT_EYE_ROI:right_eye_roi" + +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:image" + output_stream: "SIZE:image_size" +} + +### Processing left eye ### + +node { + calculator: "IrisLandmarkLandmarksToRoi" + input_stream: "LANDMARKS:left_eye_boundary_landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:left_eye_roi" +} + +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:left_eye_flag_side_packet" + options { + [mediapipe.ConstantSidePacketCalculatorOptions.ext] { + packet { bool_value: false } + } + } +} + +node { + calculator: "SidePacketToStreamCalculator" + input_stream: "TICK:image" + input_side_packet: "left_eye_flag_side_packet" + output_stream: "AT_TICK:left_eye_flag" +} + +node { + calculator: "IrisLandmarkGpu" + input_stream: "IMAGE:image" + input_stream: "ROI:left_eye_roi" + input_stream: "IS_RIGHT_EYE:left_eye_flag" + output_stream: "EYE_CONTOUR_LANDMARKS:left_eye_contour_landmarks" + output_stream: "IRIS_LANDMARKS:left_iris_landmarks" +} + +### Processing right eye ### + +node { + calculator: "IrisLandmarkLandmarksToRoi" + input_stream: "LANDMARKS:right_eye_boundary_landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:right_eye_roi" +} + +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:right_eye_flag_side_packet" + options { + [mediapipe.ConstantSidePacketCalculatorOptions.ext] { + packet { bool_value: true } + } + } +} + +node { + calculator: "SidePacketToStreamCalculator" + input_stream: "TICK:image" + input_side_packet: "right_eye_flag_side_packet" + output_stream: "AT_TICK:right_eye_flag" +} + +node { + calculator: "IrisLandmarkGpu" + input_stream: "IMAGE:image" + input_stream: "ROI:right_eye_roi" + input_stream: "IS_RIGHT_EYE:right_eye_flag" + output_stream: "EYE_CONTOUR_LANDMARKS:right_eye_contour_landmarks" + output_stream: "IRIS_LANDMARKS:right_iris_landmarks" +} + diff --git a/mediapipe/modules/objectron/BUILD b/mediapipe/modules/objectron/BUILD new file mode 100644 index 0000000..cee5768 --- /dev/null +++ b/mediapipe/modules/objectron/BUILD @@ -0,0 +1,183 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +exports_files([ + "object_detection_3d_camera.tflite", + "object_detection_3d_chair.tflite", + "object_detection_3d_chair_1stage.tflite", + "object_detection_3d_cup.tflite", + "object_detection_3d_sneakers.tflite", + "object_detection_3d_sneakers_1stage.tflite", + "object_detection_oidv4_labelmap.txt", + "object_detection_ssd_mobilenetv2_oidv4_fp16.tflite", +]) + +mediapipe_simple_subgraph( + name = "objectron_detection_1stage_gpu", + graph = "objectron_detection_1stage_gpu.pbtxt", + register_as = "ObjectronDetection1StageSubgraphGpu", + deps = [ + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/tflite:tflite_converter_calculator", + "//mediapipe/calculators/tflite:tflite_custom_op_resolver_calculator", + "//mediapipe/calculators/tflite:tflite_inference_calculator", + "//mediapipe/modules/objectron/calculators:tflite_tensors_to_objects_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "objectron_tracking_1stage_gpu", + graph = "objectron_tracking_1stage_gpu.pbtxt", + register_as = "ObjectronTracking1StageSubgraphGpu", + deps = [ + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/video:box_tracker_calculator", + "//mediapipe/calculators/video:flow_packager_calculator", + "//mediapipe/calculators/video:motion_analysis_calculator", + "//mediapipe/framework/stream_handler:sync_set_input_stream_handler", + "//mediapipe/gpu:gpu_buffer_to_image_frame_calculator", + "//mediapipe/modules/objectron/calculators:frame_annotation_to_timed_box_list_calculator", + "//mediapipe/modules/objectron/calculators:frame_annotation_tracker_calculator", + "//mediapipe/modules/objectron/calculators:lift_2d_frame_annotation_to_3d_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "box_landmark_gpu", + graph = "box_landmark_gpu.pbtxt", + register_as = "BoxLandmarkSubgraph", + deps = [ + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_floats_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + "//mediapipe/calculators/util:thresholding_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "box_landmark_cpu", + graph = "box_landmark_cpu.pbtxt", + register_as = "BoxLandmarkSubgraph", + deps = [ + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_floats_calculator", + 
"//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + "//mediapipe/calculators/util:thresholding_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "object_detection_oid_v4_gpu", + graph = "object_detection_oid_v4_gpu.pbtxt", + register_as = "ObjectDetectionOidV4Subgraph", + deps = [ + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_detections_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + "//mediapipe/modules/objectron/calculators:filter_detection_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "object_detection_oid_v4_cpu", + graph = "object_detection_oid_v4_cpu.pbtxt", + register_as = "ObjectDetectionOidV4Subgraph", + deps = [ + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_detections_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + "//mediapipe/modules/objectron/calculators:filter_detection_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "objectron_cpu", + graph = "objectron_cpu.pbtxt", + register_as = "ObjectronCpuSubgraph", + deps = [ + ":box_landmark_cpu", + ":object_detection_oid_v4_cpu", + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:clip_vector_size_calculator", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tflite:tflite_model_calculator", + "//mediapipe/calculators/util:association_norm_rect_calculator", + "//mediapipe/calculators/util:collection_has_min_size_calculator", + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/calculators/util:local_file_contents_calculator", + "//mediapipe/modules/objectron/calculators:frame_annotation_to_rect_calculator", + "//mediapipe/modules/objectron/calculators:landmarks_to_frame_annotation_calculator", + "//mediapipe/modules/objectron/calculators:lift_2d_frame_annotation_to_3d_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "objectron_gpu", + graph = "objectron_gpu.pbtxt", + register_as = "ObjectronGpuSubgraph", + deps = [ + ":box_landmark_gpu", + ":object_detection_oid_v4_gpu", + "//mediapipe/calculators/core:begin_loop_calculator", + "//mediapipe/calculators/core:clip_vector_size_calculator", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:end_loop_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + 
"//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/util:association_norm_rect_calculator", + "//mediapipe/calculators/util:collection_has_min_size_calculator", + "//mediapipe/calculators/util:detections_to_rects_calculator", + "//mediapipe/modules/objectron/calculators:frame_annotation_to_rect_calculator", + "//mediapipe/modules/objectron/calculators:landmarks_to_frame_annotation_calculator", + "//mediapipe/modules/objectron/calculators:lift_2d_frame_annotation_to_3d_calculator", + ], +) diff --git a/mediapipe/modules/objectron/README.md b/mediapipe/modules/objectron/README.md new file mode 100644 index 0000000..00883fe --- /dev/null +++ b/mediapipe/modules/objectron/README.md @@ -0,0 +1,6 @@ +# objectron + +Subgraphs|Details +:--- | :--- +[`ObjectronCpuSubgraph`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/objectron_cpu.pbtxt)| Detects and tracks 3D bounding boxes for objects. (CPU input, and inference is executed on CPU.) +[`ObjectronGpuSubgraph`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/objectron_gpu.pbtxt)| Detects and tracks 3D bounding boxes for objects. (GPU input, and inference is executed on GPU.) diff --git a/mediapipe/modules/objectron/box_landmark_cpu.pbtxt b/mediapipe/modules/objectron/box_landmark_cpu.pbtxt new file mode 100644 index 0000000..bb638d1 --- /dev/null +++ b/mediapipe/modules/objectron/box_landmark_cpu.pbtxt @@ -0,0 +1,147 @@ +# MediaPipe Box landmark localization CPU subgraph. + +type: "BoxLandmarkSubgraph" + +input_stream: "IMAGE:image" +input_stream: "NORM_RECT:box_rect" +input_side_packet: "MODEL:model" +output_stream: "NORM_LANDMARKS:box_landmarks" + +# Extracts image size from the input images. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:image" + output_stream: "SIZE:image_size" +} + +# Expands the rectangle that contain the box so that it's likely to cover the +# entire box. +node { + calculator: "RectTransformationCalculator" + input_stream: "NORM_RECT:box_rect" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "box_rect_scaled" + options: { + [mediapipe.RectTransformationCalculatorOptions.ext] { + scale_x: 1.5 + scale_y: 1.5 + square_long: true + } + } +} + +# Crops, resizes, and converts the input video into tensor. +# Preserves aspect ratio of the images. +node { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE:image" + input_stream: "NORM_RECT:box_rect_scaled" + output_stream: "TENSORS:image_tensor" + output_stream: "LETTERBOX_PADDING:letterbox_padding" + options { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 224 + output_tensor_height: 224 + keep_aspect_ratio: true + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + gpu_origin: TOP_LEFT + border_mode: BORDER_REPLICATE + } + } +} + +# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:image_tensor" + input_side_packet: "MODEL:model" + output_stream: "TENSORS:output_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + delegate { xnnpack {} } + } + } +} + +# Splits a vector of tensors into multiple vectors. 
+node { + calculator: "SplitTensorVectorCalculator" + input_stream: "output_tensors" + output_stream: "landmark_tensors" + output_stream: "box_flag_tensor" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 1 } + ranges: { begin: 1 end: 2 } + } + } +} + +# Converts the box-flag tensor into a float that represents the confidence +# score of box presence. +node { + calculator: "TensorsToFloatsCalculator" + input_stream: "TENSORS:box_flag_tensor" + output_stream: "FLOAT:box_presence_score" +} + +# Applies a threshold to the confidence score to determine whether a box is +# present. +node { + calculator: "ThresholdingCalculator" + input_stream: "FLOAT:box_presence_score" + output_stream: "FLAG:box_presence" + options: { + [mediapipe.ThresholdingCalculatorOptions.ext] { + threshold: 0.99 + } + } +} + +# Drops landmark tensors if box is not present. +node { + calculator: "GateCalculator" + input_stream: "landmark_tensors" + input_stream: "ALLOW:box_presence" + output_stream: "gated_landmark_tensors" +} + +# Decodes the landmark tensors into a list of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:gated_landmark_tensors" + output_stream: "NORM_LANDMARKS:landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 9 + input_image_width: 224 + input_image_height: 224 + } + } +} + +# Adjusts landmarks (already normalized to [0.f, 1.f]) on the letterboxed box +# image (after image transformation with the FIT scale mode) to the +# corresponding locations on the same image with the letterbox removed (box +# image before image transformation). +node { + calculator: "LandmarkLetterboxRemovalCalculator" + input_stream: "LANDMARKS:landmarks" + input_stream: "LETTERBOX_PADDING:letterbox_padding" + output_stream: "LANDMARKS:scaled_landmarks" +} + +# Projects the landmarks from the cropped box image to the corresponding +# locations on the full image before cropping (input to the graph). +node { + calculator: "LandmarkProjectionCalculator" + input_stream: "NORM_LANDMARKS:scaled_landmarks" + input_stream: "NORM_RECT:box_rect_scaled" + output_stream: "NORM_LANDMARKS:box_landmarks" +} diff --git a/mediapipe/modules/objectron/box_landmark_gpu.pbtxt b/mediapipe/modules/objectron/box_landmark_gpu.pbtxt new file mode 100644 index 0000000..ac95880 --- /dev/null +++ b/mediapipe/modules/objectron/box_landmark_gpu.pbtxt @@ -0,0 +1,147 @@ +# MediaPipe Box landmark localization GPU subgraph. + +type: "BoxLandmarkSubgraph" + +input_stream: "IMAGE:image" +input_stream: "NORM_RECT:box_rect" +output_stream: "NORM_LANDMARKS:box_landmarks" + +# Extracts image size from the input images. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:image" + output_stream: "SIZE:image_size" +} + +# Expands the rectangle that contains the box so that it's likely to cover the +# entire box. +node { + calculator: "RectTransformationCalculator" + input_stream: "NORM_RECT:box_rect" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "box_rect_scaled" + options: { + [mediapipe.RectTransformationCalculatorOptions.ext] { + scale_x: 1.5 + scale_y: 1.5 + square_long: true + } + } +} +
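The RectTransformationCalculator options above (scale 1.5, square_long) recur throughout these graphs; the iris ROI graph earlier uses the same calculator with scale 2.3. A minimal Rust sketch of the scaling and square_long behavior, under the simplifying assumption of an unrotated rect (the real calculator also handles rotation and shift options):

struct NormRect { x_center: f32, y_center: f32, width: f32, height: f32 }

// Scales a normalized rect and, like square_long, grows the short side to
// match the long side. Work in pixels so the square is square on the image
// rather than in normalized units.
fn expand_rect(r: &NormRect, scale: f32, img_w: f32, img_h: f32) -> NormRect {
    let w = r.width * img_w * scale;
    let h = r.height * img_h * scale;
    let side = w.max(h);
    NormRect {
        x_center: r.x_center,
        y_center: r.y_center,
        width: side / img_w,
        height: side / img_h,
    }
}

+# Crops, resizes, and converts the input video into a tensor.
+# Preserves aspect ratio of the images.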
+node { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE_GPU:image" + input_stream: "NORM_RECT:box_rect_scaled" + output_stream: "TENSORS:image_tensor" + output_stream: "LETTERBOX_PADDING:letterbox_padding" + options { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 224 + output_tensor_height: 224 + keep_aspect_ratio: true + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + gpu_origin: TOP_LEFT + border_mode: BORDER_REPLICATE + } + } +} + +# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:image_tensor" + output_stream: "TENSORS:output_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + model_path: "object_detection_3d.tflite" + delegate { gpu {} } + } + } +} + +# Splits a vector of tensors into multiple vectors according to the ranges +# specified in the options. +node { + calculator: "SplitTensorVectorCalculator" + input_stream: "output_tensors" + output_stream: "landmark_tensors" + output_stream: "box_flag_tensor" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 1 } + ranges: { begin: 1 end: 2 } + } + } +} + +# Converts the box-flag tensor into a float that represents the confidence +# score of box presence. +node { + calculator: "TensorsToFloatsCalculator" + input_stream: "TENSORS:box_flag_tensor" + output_stream: "FLOAT:box_presence_score" +} + +# Applies a threshold to the confidence score to determine whether a box is +# present. +node { + calculator: "ThresholdingCalculator" + input_stream: "FLOAT:box_presence_score" + output_stream: "FLAG:box_presence" + options: { + [mediapipe.ThresholdingCalculatorOptions.ext] { + threshold: 0.99 + } + } +} + +# Drops landmark tensors if box is not present. +node { + calculator: "GateCalculator" + input_stream: "landmark_tensors" + input_stream: "ALLOW:box_presence" + output_stream: "gated_landmark_tensors" +} + +# Decodes the landmark tensors into a list of landmarks, where the landmark +# coordinates are normalized by the size of the input image to the model. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:gated_landmark_tensors" + output_stream: "NORM_LANDMARKS:landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 9 + input_image_width: 224 + input_image_height: 224 + } + } +} + +# Adjusts landmarks (already normalized to [0.f, 1.f]) on the letterboxed box +# image (after image transformation with the FIT scale mode) to the +# corresponding locations on the same image with the letterbox removed (box +# image before image transformation). +node { + calculator: "LandmarkLetterboxRemovalCalculator" + input_stream: "LANDMARKS:landmarks" + input_stream: "LETTERBOX_PADDING:letterbox_padding" + output_stream: "LANDMARKS:scaled_landmarks" +} +
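The two mapping steps here (letterbox removal above, projection below) are simple coordinate transforms. A rough Rust sketch with stand-in types, ignoring rotation; the z handling only approximates the real calculators:

struct Landmark { x: f32, y: f32, z: f32 }
struct NormRect { x_center: f32, y_center: f32, width: f32, height: f32 }

// Undo FIT-mode letterboxing: pad = (left, top, right, bottom), each
// normalized to the padded crop.
fn remove_letterbox(lm: &Landmark, pad: (f32, f32, f32, f32)) -> Landmark {
    let (left, top, right, bottom) = pad;
    Landmark {
        x: (lm.x - left) / (1.0 - left - right),
        y: (lm.y - top) / (1.0 - top - bottom),
        z: lm.z / (1.0 - left - right),
    }
}

// Map crop-relative landmarks back onto the full image via the crop rect.
fn project_to_image(lm: &Landmark, rect: &NormRect) -> Landmark {
    Landmark {
        x: rect.x_center + (lm.x - 0.5) * rect.width,
        y: rect.y_center + (lm.y - 0.5) * rect.height,
        z: lm.z * rect.width,
    }
}

+# Projects the landmarks from the cropped box image to the corresponding
+# locations on the full image before cropping (input to the graph).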
+node { + calculator: "LandmarkProjectionCalculator" + input_stream: "NORM_LANDMARKS:scaled_landmarks" + input_stream: "NORM_RECT:box_rect_scaled" + output_stream: "NORM_LANDMARKS:box_landmarks" +} diff --git a/mediapipe/modules/objectron/calculators/BUILD b/mediapipe/modules/objectron/calculators/BUILD new file mode 100644 index 0000000..fb75eb3 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/BUILD @@ -0,0 +1,407 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") +load("//mediapipe/framework:mediapipe_register_type.bzl", "mediapipe_register_type") + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_proto_library( + name = "object_proto", + srcs = ["object.proto"], + visibility = ["//visibility:public"], +) + +mediapipe_proto_library( + name = "a_r_capture_metadata_proto", + srcs = ["a_r_capture_metadata.proto"], + visibility = ["//visibility:public"], +) + +mediapipe_proto_library( + name = "annotation_proto", + srcs = ["annotation_data.proto"], + def_options_lib = False, + visibility = ["//visibility:public"], + deps = [ + ":a_r_capture_metadata_proto", + ":object_proto", + ], +) + +mediapipe_register_type( + base_name = "annotation", + include_headers = ["mediapipe/modules/objectron/calculators/annotation_data.pb.h"], + types = [ + "::mediapipe::FrameAnnotation", + ], + deps = [":annotation_cc_proto"], +) + +mediapipe_proto_library( + name = "camera_parameters_proto", + srcs = ["camera_parameters.proto"], + visibility = ["//visibility:public"], +) + +mediapipe_proto_library( + name = "frame_annotation_tracker_calculator_proto", + srcs = ["frame_annotation_tracker_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_proto_library( + name = "belief_decoder_config_proto", + srcs = ["belief_decoder_config.proto"], + visibility = ["//visibility:public"], +) + +mediapipe_proto_library( + name = "tflite_tensors_to_objects_calculator_proto", + srcs = ["tflite_tensors_to_objects_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + ":belief_decoder_config_proto", + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_proto_library( + name = "tensors_to_objects_calculator_proto", + srcs = ["tensors_to_objects_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + ":belief_decoder_config_proto", + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_proto_library( + name = "lift_2d_frame_annotation_to_3d_calculator_proto", + srcs = ["lift_2d_frame_annotation_to_3d_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + ":belief_decoder_config_proto", + "//mediapipe/framework:calculator_options_proto", + 
"//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_proto_library( + name = "frame_annotation_to_rect_calculator_proto", + srcs = ["frame_annotation_to_rect_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_proto_library( + name = "filter_detection_calculator_proto", + srcs = ["filter_detection_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) + +cc_library( + name = "box_util", + srcs = ["box_util.cc"], + hdrs = ["box_util.h"], + deps = [ + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/framework/port:opencv_imgproc", + "//mediapipe/util/tracking:box_tracker_cc_proto", + ], +) + +cc_library( + name = "frame_annotation_tracker", + srcs = ["frame_annotation_tracker.cc"], + hdrs = ["frame_annotation_tracker.h"], + deps = [ + ":annotation_cc_proto", + ":box_util", + "//mediapipe/framework/port:integral_types", + "//mediapipe/framework/port:logging", + "//mediapipe/util/tracking:box_tracker_cc_proto", + "@com_google_absl//absl/container:btree", + "@com_google_absl//absl/container:flat_hash_set", + ], +) + +cc_library( + name = "epnp", + srcs = [ + "epnp.cc", + ], + hdrs = [ + "epnp.h", + ], + deps = [ + "//mediapipe/framework/port:logging", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings:str_format", + "@eigen_archive//:eigen3", + ], +) + +cc_library( + name = "decoder", + srcs = [ + "decoder.cc", + ], + hdrs = [ + "decoder.h", + ], + deps = [ + ":annotation_cc_proto", + ":belief_decoder_config_cc_proto", + ":box", + ":epnp", + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/framework/port:opencv_imgproc", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/status", + "@eigen_archive//:eigen3", + ], +) + +cc_library( + name = "tensor_util", + srcs = [ + "tensor_util.cc", + ], + hdrs = [ + "tensor_util.h", + ], + deps = [ + "//mediapipe/framework/formats:tensor", + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:opencv_core", + "@org_tensorflow//tensorflow/lite:framework", + ], +) + +cc_library( + name = "box", + srcs = [ + "box.cc", + "model.cc", + ], + hdrs = [ + "box.h", + "model.h", + "types.h", + ], + visibility = ["//visibility:public"], + deps = [ + ":annotation_cc_proto", + ":object_cc_proto", + "//mediapipe/framework/port:logging", + "@eigen_archive//:eigen3", + ], +) + +cc_library( + name = "frame_annotation_to_timed_box_list_calculator", + srcs = ["frame_annotation_to_timed_box_list_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":annotation_cc_proto", + ":box_util", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/framework/port:opencv_imgproc", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/util/tracking:box_tracker_cc_proto", + "@com_google_absl//absl/memory", + ], + alwayslink = 1, +) + +cc_library( + name = "frame_annotation_tracker_calculator", + srcs = ["frame_annotation_tracker_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":annotation_cc_proto", + ":frame_annotation_tracker", + ":frame_annotation_tracker_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + 
"//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/util/tracking:box_tracker_cc_proto", + "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/memory", + ], + alwayslink = 1, +) + +cc_library( + name = "tflite_tensors_to_objects_calculator", + srcs = ["tflite_tensors_to_objects_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":annotation_cc_proto", + ":belief_decoder_config_cc_proto", + ":decoder", + ":tensor_util", + ":tflite_tensors_to_objects_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/deps:file_path", + "//mediapipe/framework/formats:detection_cc_proto", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/types:span", + "@eigen_archive//:eigen3", + "@org_tensorflow//tensorflow/lite:framework", + ], + alwayslink = 1, +) + +cc_library( + name = "tensors_to_objects_calculator", + srcs = ["tensors_to_objects_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":annotation_cc_proto", + ":belief_decoder_config_cc_proto", + ":decoder", + ":tensor_util", + ":tensors_to_objects_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/deps:file_path", + "//mediapipe/framework/formats:detection_cc_proto", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/types:span", + "@eigen_archive//:eigen3", + ], + alwayslink = 1, +) + +cc_library( + name = "lift_2d_frame_annotation_to_3d_calculator", + srcs = ["lift_2d_frame_annotation_to_3d_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":annotation_cc_proto", + ":belief_decoder_config_cc_proto", + ":decoder", + ":lift_2d_frame_annotation_to_3d_calculator_cc_proto", + ":tensor_util", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/deps:file_path", + "//mediapipe/framework/formats:detection_cc_proto", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/types:span", + "@eigen_archive//:eigen3", + "@org_tensorflow//tensorflow/lite:framework", + ], + alwayslink = 1, +) + +cc_library( + name = "frame_annotation_to_rect_calculator", + srcs = ["frame_annotation_to_rect_calculator.cc"], + deps = [ + ":annotation_cc_proto", + ":frame_annotation_to_rect_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:rect_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/memory", + "@eigen_archive//:eigen3", + ], + alwayslink = 1, +) + +cc_library( + name = "landmarks_to_frame_annotation_calculator", + srcs = ["landmarks_to_frame_annotation_calculator.cc"], + deps = [ + ":annotation_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/memory", + ], + alwayslink = 1, +) + +cc_library( + name = "filter_detection_calculator", + srcs = ["filter_detection_calculator.cc"], + deps = [ + ":filter_detection_calculator_cc_proto", + 
"//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:detection_cc_proto", + "//mediapipe/framework/formats:location_data_cc_proto", + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:map_util", + "//mediapipe/framework/port:re2", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) + +cc_test( + name = "box_util_test", + srcs = ["box_util_test.cc"], + deps = [ + ":box_util", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/util/tracking:box_tracker_cc_proto", + ], +) + +cc_test( + name = "frame_annotation_tracker_test", + srcs = ["frame_annotation_tracker_test.cc"], + deps = [ + ":annotation_cc_proto", + ":frame_annotation_tracker", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:logging", + "//mediapipe/util/tracking:box_tracker_cc_proto", + "@com_google_absl//absl/container:flat_hash_set", + ], +) diff --git a/mediapipe/modules/objectron/calculators/a_r_capture_metadata.proto b/mediapipe/modules/objectron/calculators/a_r_capture_metadata.proto new file mode 100644 index 0000000..edc8c4b --- /dev/null +++ b/mediapipe/modules/objectron/calculators/a_r_capture_metadata.proto @@ -0,0 +1,551 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +// Info about the camera characteristics used to capture images and depth data. +// See developer.apple.com/documentation/avfoundation/avcameracalibrationdata +// for more information. +message AVCameraCalibrationData { + // 3x3 row-major matrix relating a camera's internal properties to an ideal + // pinhole-camera model. + // See + // developer.apple.com/documentation/avfoundation/avcameracalibrationdata/2881135-intrinsicmatrix + // for detailed usage information. + repeated float intrinsic_matrix = 1 [packed = true]; + + // The image dimensions to which the intrinsic_matrix values are relative. + optional float intrinsic_matrix_reference_dimension_width = 2; + optional float intrinsic_matrix_reference_dimension_height = 3; + + // 3x4 row-major matrix relating a camera's position and orientation to a + // world or scene coordinate system. Consists of a unitless 3x3 rotation + // matrix (R) on the left and a translation (t) 3x1 vector on the right. The + // translation vector's units are millimeters. For example: + // + // |r1,1 r2,1 r3,1 | t1| + // [R | t] = |r1,2 r2,2 r3,2 | t2| + // |r1,3 r2,3 r3,3 | t3| + // + // is stored as [r11, r21, r31, t1, r12, r22, r32, t2, ...] + // + // See + // developer.apple.com/documentation/avfoundation/avcameracalibrationdata/2881130-extrinsicmatrix?language=objc + // for more information. + repeated float extrinsic_matrix = 4 [packed = true]; + + // The size, in millimeters, of one image pixel. 
+ optional float pixel_size = 5; + + // A list of floating-point values describing radial distortions imparted by + // the camera lens, for use in rectifying camera images. + // See + // developer.apple.com/documentation/avfoundation/avcameracalibrationdata/2881129-lensdistortionlookuptable?language=objc + // for more information. + repeated float lens_distortion_lookup_values = 6 [packed = true]; + + // A list of floating-point values describing radial distortions for use in + // reapplying camera geometry to a rectified image. + // See + // developer.apple.com/documentation/avfoundation/avcameracalibrationdata/2881132-inverselensdistortionlookuptable?language=objc + // for more information. + repeated float inverse_lens_distortion_lookup_values = 7 [packed = true]; + + // The offset of the distortion center of the camera lens from the top-left + // corner of the image. + // See + // developer.apple.com/documentation/avfoundation/avcameracalibrationdata/2881131-lensdistortioncenter?language=objc + // for more information. + optional float lens_distortion_center_x = 8; + optional float lens_distortion_center_y = 9; +} + +// Container for depth data information. +// See developer.apple.com/documentation/avfoundation/avdepthdata for more info. +message AVDepthData { + // PNG representation of the grayscale depth data map. See discussion about + // depth_data_map_original_minimum_value, below, for information about how + // to interpret the pixel values. + optional bytes depth_data_map = 1; + + // Pixel format type of the original captured depth data. + // See + // developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers?language=objc + // for the complete list of possible pixel format types. This value represents + // a string for the associated OSType/FourCharCode. + optional string depth_data_type = 2; + + // Indicates the general accuracy of the depth_data_map. + // See developer.apple.com/documentation/avfoundation/avdepthdataaccuracy for + // more information. + enum Accuracy { + UNDEFINED_ACCURACY = 0; + // Values in the depth map are usable for foreground/background separation + // but are not absolutely accurate in the physical world. + RELATIVE = 1; + // Values in the depth map are absolutely accurate in the physical world. + ABSOLUTE = 2; + } + optional Accuracy depth_data_accuracy = 3 [default = RELATIVE]; + + // Indicates whether the depth_data_map contains temporally smoothed data. + optional bool depth_data_filtered = 4; + + // Quality of the depth_data_map. + enum Quality { + UNDEFINED_QUALITY = 0; + HIGH = 1; + LOW = 2; + } + optional Quality depth_data_quality = 5; + + // Associated calibration data for the depth_data_map. + optional AVCameraCalibrationData camera_calibration_data = 6; + + // The original range of values expressed by the depth_data_map, before + // grayscale normalization. For example, if the minimum and maximum values + // indicate a range of [0.5, 2.2], and the depth_data_type value indicates + // it was a depth map, then white pixels (255, 255, 255) will map to 0.5 and + // black pixels (0, 0, 0) will map to 2.2 with the grayscale range linearly + // interpolated in between. Conversely, if the depth_data_type value indicates + // it was a disparity map, then white pixels will map to 2.2 and black pixels + // will map to 0.5. + optional float depth_data_map_original_minimum_value = 7; + optional float depth_data_map_original_maximum_value = 8; +
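The min/max comment above implies a simple decoding rule. A small Rust sketch of recovering metric depth from one grayscale pixel of depth_data_map, assuming the linear mapping described in that comment:

// For a depth map, white (255) maps to the original minimum and black (0)
// to the maximum, linearly interpolated in between; a disparity map flips
// the endpoints. Sketch only; real decoding must honor depth_data_type.
fn gray_to_meters(pixel: u8, min_m: f32, max_m: f32, is_disparity: bool) -> f32 {
    let t = pixel as f32 / 255.0; // 0.0 = black, 1.0 = white
    if is_disparity {
        min_m + t * (max_m - min_m)
    } else {
        max_m - t * (max_m - min_m)
    }
}

+ // The width of the depth buffer map.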
+ optional int32 depth_data_map_width = 9; + + // The height of the depth buffer map. + optional int32 depth_data_map_height = 10; + + // The row-major flattened array of the depth buffer map pixels. This will be + // either a float32 or float16 byte array, depending on 'depth_data_type'. + optional bytes depth_data_map_raw_values = 11; +} + +// Estimated scene lighting information associated with a captured video frame. +// See developer.apple.com/documentation/arkit/arlightestimate for more info. +message ARLightEstimate { + // The estimated intensity, in lumens, of ambient light throughout the scene. + optional double ambient_intensity = 1; + + // The estimated color temperature, in degrees Kelvin, of ambient light + // throughout the scene. + optional double ambient_color_temperature = 2; + + // Data describing the estimated lighting environment in all directions. + // Second-level spherical harmonics in separate red, green, and blue data + // planes. Thus, this buffer contains 3 sets of 9 coefficients, or a total of + // 27 values. + // See + // https://developer.apple.com/documentation/arkit/ardirectionallightestimate/2928222-sphericalharmonicscoefficients?language=objc + // for more information. + repeated float spherical_harmonics_coefficients = 3 [packed = true]; + + message DirectionVector { + optional float x = 1; + optional float y = 2; + optional float z = 3; + } + // A vector indicating the orientation of the strongest directional light + // source, normalized in the world-coordinate space. + // See + // https://developer.apple.com/documentation/arkit/ardirectionallightestimate/2928221-primarylightdirection?language=objc + // for more information. + optional DirectionVector primary_light_direction = 4; + + // The estimated intensity, in lumens, of the strongest directional light + // source in the scene. + // See + // https://developer.apple.com/documentation/arkit/ardirectionallightestimate/2928219-primarylightintensity?language=objc + // for more information. + optional float primary_light_intensity = 5; +} + +// Information about the camera position and imaging characteristics for a +// captured video frame. +// See developer.apple.com/documentation/arkit/arcamera for more information. +message ARCamera { + // The general quality of position tracking available when the camera captured + // a frame. + enum TrackingState { + UNDEFINED_TRACKING_STATE = 0; + // Camera position tracking is not available. + UNAVAILABLE = 1; + // Tracking is available, but the quality of results is questionable. + LIMITED = 2; + // Camera position tracking is providing optimal results. + NORMAL = 3; + } + optional TrackingState tracking_state = 1 [default = UNAVAILABLE]; + + // A possible diagnosis for limited position tracking quality as of when the + // frame was captured. + enum TrackingStateReason { + UNDEFINED_TRACKING_STATE_REASON = 0; + // The current tracking state is not limited. + NONE = 1; + // Not yet enough camera or motion data to provide tracking information. + INITIALIZING = 2; + // The device is moving too fast for accurate image-based position tracking. + EXCESSIVE_MOTION = 3; + // Not enough distinguishable features for image-based position tracking. + INSUFFICIENT_FEATURES = 4; + // Tracking is limited due to a relocalization in progress. + RELOCALIZING = 5; + } + optional TrackingStateReason tracking_state_reason = 2 [default = NONE]; + + // 4x4 row-major matrix expressing position and orientation of the camera in + // world coordinate space.
+ // See developer.apple.com/documentation/arkit/arcamera/2866108-transform for + // more information. + repeated float transform = 3 [packed = true]; + + // The orientation of the camera, expressed as roll, pitch, and yaw values. + message EulerAngles { + optional float roll = 1; + optional float pitch = 2; + optional float yaw = 3; + } + optional EulerAngles euler_angles = 4; + + // The width and height, in pixels, of the captured camera image. + optional int32 image_resolution_width = 5; + optional int32 image_resolution_height = 6; + + // 3x3 row-major matrix that converts between the 2D camera plane and 3D world + // coordinate space. + // See developer.apple.com/documentation/arkit/arcamera/2875730-intrinsics for + // usage information. + repeated float intrinsics = 7 [packed = true]; + + // 4x4 row-major transform matrix appropriate for rendering 3D content to + // match the image captured by the camera. + // See + // developer.apple.com/documentation/arkit/arcamera/2887458-projectionmatrix + // for usage information. + repeated float projection_matrix = 8 [packed = true]; + + // 4x4 row-major transform matrix appropriate for converting from world-space + // to camera space. Relativized for the captured_image orientation (i.e. + // UILandscapeOrientationRight). + // See + // https://developer.apple.com/documentation/arkit/arcamera/2921672-viewmatrixfororientation?language=objc + // for more information. + repeated float view_matrix = 9 [packed = true]; +} + +// Container for a 3D mesh describing face topology. +message ARFaceGeometry { + // Each vertex represents a 3D point in the face mesh, in the face coordinate + // space. + // See developer.apple.com/documentation/arkit/arfacegeometry/2928201-vertices + // for more information. + message Vertex { + optional float x = 1; + optional float y = 2; + optional float z = 3; + } + repeated Vertex vertices = 1; + + // The number of elements in the vertices list. + optional int32 vertex_count = 2; + + // Each texture coordinate represents UV texture coordinates for the vertex at + // the corresponding index in the vertices buffer. + // See + // developer.apple.com/documentation/arkit/arfacegeometry/2928203-texturecoordinates + // for more information. + message TextureCoordinate { + optional float u = 1; + optional float v = 2; + } + repeated TextureCoordinate texture_coordinates = 3; + + // The number of elements in the texture_coordinates list. + optional int32 texture_coordinate_count = 4; + + // Each integer value in this ordered list represents an index into the + // vertices and texture_coordinates lists. Each set of three indices + // identifies the vertices comprising a single triangle in the mesh. Each set + // of three indices forms a triangle, so the number of indices in the + // triangle_indices buffer is three times the triangle_count value. + // See + // developer.apple.com/documentation/arkit/arfacegeometry/2928199-triangleindices + // for more information. + repeated int32 triangle_indices = 5 [packed = true]; + + // The number of triangles described by the triangle_indices buffer. + // See + // developer.apple.com/documentation/arkit/arfacegeometry/2928207-trianglecount + // for more information. + optional int32 triangle_count = 6; +} + +// Contains a list of blend shape entries wherein each item maps a specific +// blend shape location to its associated coefficient. +message ARBlendShapeMap { + message MapEntry { + // Identifier for the specific facial feature. 
+ // See developer.apple.com/documentation/arkit/arblendshapelocation for a + // complete list of identifiers. + optional string blend_shape_location = 1; + + // Indicates the current position of the feature relative to its neutral + // configuration, ranging from 0.0 (neutral) to 1.0 (maximum movement). + optional float blend_shape_coefficient = 2; + } + repeated MapEntry entries = 1; +} + +// Information about the pose, topology, and expression of a detected face. +// See developer.apple.com/documentation/arkit/arfaceanchor for more info. +message ARFaceAnchor { + // A coarse triangle mesh representing the topology of the detected face. + optional ARFaceGeometry geometry = 1; + + // A map of named coefficients representing the detected facial expression in + // terms of the movement of specific facial features. + optional ARBlendShapeMap blend_shapes = 2; + + // 4x4 row-major matrix encoding the position, orientation, and scale of the + // anchor relative to the world coordinate space. + // See + // https://developer.apple.com/documentation/arkit/aranchor/2867981-transform?language=objc + // for more information. + repeated float transform = 3; + + // Indicates whether the anchor's transform is valid. Frames that have a face + // anchor with this value set to NO should probably be ignored. + optional bool is_tracked = 4; +} + +// Container for a 3D mesh. +message ARPlaneGeometry { + message Vertex { + optional float x = 1; + optional float y = 2; + optional float z = 3; + } + + // Each texture coordinate represents UV texture coordinates for the vertex at + // the corresponding index in the vertices buffer. + // See + // https://developer.apple.com/documentation/arkit/arfacegeometry/2928203-texturecoordinates + // for more information. + message TextureCoordinate { + optional float u = 1; + optional float v = 2; + } + + // A buffer of vertex positions for each point in the plane mesh. + repeated Vertex vertices = 1; + + // The number of elements in the vertices buffer. + optional int32 vertex_count = 2; + + // A buffer of texture coordinate values for each point in the plane mesh. + repeated TextureCoordinate texture_coordinates = 3; + + // The number of elements in the texture_coordinates buffer. + optional int32 texture_coordinate_count = 4; + + // Each integer value in this ordered list represents an index into the + // vertices and texture_coordinates lists. Each set of three indices + // identifies the vertices comprising a single triangle in the mesh. Each set + // of three indices forms a triangle, so the number of indices in the + // triangle_indices buffer is three times the triangle_count value. + // See + // https://developer.apple.com/documentation/arkit/arplanegeometry/2941051-triangleindices + // for more information. + repeated int32 triangle_indices = 5 [packed = true]; + + // Each set of three indices forms a triangle, so the number of indices in the + // triangle_indices buffer is three times the triangle_count value. + // See + // https://developer.apple.com/documentation/arkit/arplanegeometry/2941058-trianglecount + // for more information. + optional int32 triangle_count = 6; + + // Each value in this buffer represents the position of a vertex along the + // boundary polygon of the estimated plane. The owning plane anchor's + // transform matrix defines the coordinate system for these points. + // See + // https://developer.apple.com/documentation/arkit/arplanegeometry/2941052-boundaryvertices + // for more information. 
+ repeated Vertex boundary_vertices = 7; + + // The number of elements in the boundary_vertices buffer. + optional int32 boundary_vertex_count = 8; +} + +// Information about the position and orientation of a real-world flat surface. +// See https://developer.apple.com/documentation/arkit/arplaneanchor for more +// information. +message ARPlaneAnchor { + enum Alignment { + UNDEFINED = 0; + // The plane is perpendicular to gravity. + HORIZONTAL = 1; + // The plane is parallel to gravity. + VERTICAL = 2; + } + + // Wrapper for a 3D point / vector within the plane. See extent and center + // values for more information. + message PlaneVector { + optional float x = 1; + optional float y = 2; + optional float z = 3; + } + + enum PlaneClassification { + NONE = 0; + WALL = 1; + FLOOR = 2; + CEILING = 3; + TABLE = 4; + SEAT = 5; + } + + // The classification status for the plane. + enum PlaneClassificationStatus { + // The classification process for the plane anchor has completed but the + // result is inconclusive. + UNKNOWN = 0; + // No classification information can be provided (set on error or if the + // device does not support plane classification). + UNAVAILABLE = 1; + // The classification process has not completed. + UNDETERMINED = 2; + // The classification process for the plane anchor has completed. + KNOWN = 3; + } + + // The ID of the plane. + optional string identifier = 1; + + // 4x4 row-major matrix encoding the position, orientation, and scale of the + // anchor relative to the world coordinate space. + // See + // https://developer.apple.com/documentation/arkit/aranchor/2867981-transform + // for more information. + repeated float transform = 2; + + // The general orientation of the detected plane with respect to gravity. + optional Alignment alignment = 3; + + // A coarse triangle mesh representing the general shape of the detected + // plane. + optional ARPlaneGeometry geometry = 4; + + // The center point of the plane relative to its anchor position. + // Although the type of this property is a 3D vector, a plane anchor is always + // two-dimensional, and is always positioned in only the x and z directions + // relative to its transform position. (That is, the y-component of this + // vector is always zero.) + // See + // https://developer.apple.com/documentation/arkit/arplaneanchor/2882056-center + // for more information. + optional PlaneVector center = 5; + + // The estimated width and length of the detected plane. + // See + // https://developer.apple.com/documentation/arkit/arplaneanchor/2882055-extent + // for more information. + optional PlaneVector extent = 6; + + // A Boolean value that indicates whether plane classification is available on + // the current device. On devices without plane classification support, all + // plane anchors report a classification value of NONE + // and a classification_status value of UNAVAILABLE. + optional bool classification_supported = 7; + + // A general characterization of what kind of real-world surface the plane + // anchor represents. + // See + // https://developer.apple.com/documentation/arkit/arplaneanchor/2990936-classification + // for more information. + optional PlaneClassification classification = 8; + + // The current state of ARKit's process for classifying the plane anchor. + // When this property's value is KNOWN, the classification property represents + // ARKit's characterization of the real-world surface corresponding to the + // plane anchor.
+ // See + // https://developer.apple.com/documentation/arkit/arplaneanchor/2990937-classificationstatus + // for more information. + optional PlaneClassificationStatus classification_status = 9; +} + +// A collection of points in the world coordinate space. +// See https://developer.apple.com/documentation/arkit/arpointcloud for more +// information. +message ARPointCloud { + message Point { + optional float x = 1; + optional float y = 2; + optional float z = 3; + } + + // The number of points in the cloud. + optional int32 count = 1; + + // The list of detected points. + repeated Point point = 2; + + // A list of unique identifiers corresponding to detected feature points. + // Each identifier in this list corresponds to the point at the same index + // in the points array. + repeated int64 identifier = 3 [packed = true]; +} + +// Video image and face position tracking information. +// See developer.apple.com/documentation/arkit/arframe for more information. +message ARFrame { + // The timestamp for the frame. + optional double timestamp = 1; + + // The depth data associated with the frame. Not all frames have depth data. + optional AVDepthData depth_data = 2; + + // The depth data object timestamp associated with the frame. May differ from + // the frame timestamp value. Is only set when the frame has depth_data. + optional double depth_data_timestamp = 3; + + // Camera information associated with the frame. + optional ARCamera camera = 4; + + // Light information associated with the frame. + optional ARLightEstimate light_estimate = 5; + + // Face anchor information associated with the frame. Not all frames have an + // active face anchor. + optional ARFaceAnchor face_anchor = 6; + + // Plane anchors associated with the frame. Not all frames have a plane + // anchor. Plane anchors and face anchors are mutually exclusive. + repeated ARPlaneAnchor plane_anchor = 7; + + // The current intermediate results of the scene analysis used to perform + // world tracking. + // See + // https://developer.apple.com/documentation/arkit/arframe/2887449-rawfeaturepoints + // for more information. + optional ARPointCloud raw_feature_points = 8; +} diff --git a/mediapipe/modules/objectron/calculators/annotation_data.proto b/mediapipe/modules/objectron/calculators/annotation_data.proto new file mode 100644 index 0000000..6c26d29 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/annotation_data.proto @@ -0,0 +1,108 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package mediapipe; + +import "mediapipe/modules/objectron/calculators/a_r_capture_metadata.proto"; +import "mediapipe/modules/objectron/calculators/object.proto"; + +// Projection of a 3D point on an image, and its metric depth. +message NormalizedPoint2D { + // x-y position of the 2d keypoint in the image coordinate system. + // u,v \in [0, 1], where top left corner is (0, 0) and the bottom-right corner + // is (1, 1). 
+ float x = 1; + float y = 2; + + // The depth of the point in the camera coordinate system (in meters). + float depth = 3; +} + +// The 3D point in the camera coordinate system; the scales are in meters. +message Point3D { + float x = 1; + float y = 2; + float z = 3; +} + +message AnnotatedKeyPoint { + int32 id = 1; + Point3D point_3d = 2; + NormalizedPoint2D point_2d = 3; + // Indicates whether this keypoint is hidden or not. The hidden attribute is + // determined from the object's skeleton. For the box model, none of the + // keypoints are hidden. + bool hidden = 4; +} + +message ObjectAnnotation { + // Reference to the object identifier in ObjectInstance. + int32 object_id = 1; + + // For each object, list all the annotated keypoints here. + // E.g. for bounding-boxes, we have 8 keypoints, hands = 21 keypoints, etc. + // These normalized points are the projections of the Object's 3D keypoints + // onto the current frame's camera pose. + repeated AnnotatedKeyPoint keypoints = 2; + + // Visibility of this annotation in a frame. + float visibility = 3; + + // 3x3 row-major rotation matrix describing the orientation of the rigid + // object's frame of reference in the camera-coordinate system. + repeated float rotation = 4; + + // 3x1 vector describing the translation of the rigid object's frame of + // reference in the camera-coordinate system in meters. + repeated float translation = 5; + + // 3x1 vector describing the scale of the rigid object's frame of reference in + // the camera-coordinate system. + repeated float scale = 6; +} + +message FrameAnnotation { + // Unique frame id, corresponds to images. + int32 frame_id = 1; + + // List of the annotated objects in this frame. Depending on how many objects + // are observable in this frame, we might have none or as many as + // sequence.objects_size() annotations. + repeated ObjectAnnotation annotations = 2; + + // Information about the camera transformation (in the world coordinate + // system) and imaging characteristics for a captured video frame. + ARCamera camera = 3; + + // The timestamp for the frame. + double timestamp = 4; + + // Plane center and normal in camera frame. + repeated float plane_center = 5; + repeated float plane_normal = 6; +} + +// The sequence protocol contains the annotation data for the entire video clip. +message Sequence { + // List of all the annotated 3D objects in this sequence in the world + // coordinate system. Given the camera poses of each frame (also in the + // world coordinate system), these objects' bounding boxes can be projected to + // each frame to get the per-frame annotation (i.e. image_annotation below). + repeated Object objects = 1; + + // List of annotated data per each frame in sequence + frame information. + repeated FrameAnnotation frame_annotations = 2; +} diff --git a/mediapipe/modules/objectron/calculators/belief_decoder_config.proto b/mediapipe/modules/objectron/calculators/belief_decoder_config.proto new file mode 100644 index 0000000..f0f10ae --- /dev/null +++ b/mediapipe/modules/objectron/calculators/belief_decoder_config.proto @@ -0,0 +1,38 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +message BeliefDecoderConfig { + optional float heatmap_threshold = 1 [default = 0.9]; + // Maximum distance in pixels between two local max heatmap values. + optional float local_max_distance = 2 [default = 10.0]; + // Coefficient of offset_scale. + // offset_scale = offset_scale_coef * min(rows, cols). + // offset_scale is used to multiply the offset predictions from the network. + optional float offset_scale_coef = 3 [default = 0.5, deprecated = true]; + + // The radius for vertex voting. Use no voting if the radius is less than or + // euqal to 1. Example: 10. + optional int32 voting_radius = 4; + + // The number of pixels to determine whether two points are the same. + // Example: 5 (voting_radius / 2). + optional int32 voting_allowance = 5; + + // The threshold of beliefs, with which the points can vote. Example: 0.2. + optional float voting_threshold = 6; +} diff --git a/mediapipe/modules/objectron/calculators/box.cc b/mediapipe/modules/objectron/calculators/box.cc new file mode 100644 index 0000000..bd2ce57 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/box.cc @@ -0,0 +1,255 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/modules/objectron/calculators/box.h" + +#include "Eigen/Core" +#include "mediapipe/framework/port/logging.h" + +namespace mediapipe { + +namespace { +constexpr int kFrontFaceId = 4; +constexpr int kTopFaceId = 2; +constexpr int kNumKeypoints = 8 + 1; +constexpr int kNumberOfAxis = 3; +constexpr int kEdgesPerAxis = 4; + +} // namespace + +Box::Box(const std::string& category) + : Model(kBoundingBox, kNumKeypoints, category), + bounding_box_(kNumKeypoints) { + transformation_.setIdentity(); + + scale_ << 0.1, 0.1, 0.1; + + // The vertices are ordered according to the left-hand rule, so the normal + // vector of each face will point inward the box. + faces_.push_back({5, 6, 8, 7}); // +x on yz plane + faces_.push_back({1, 3, 4, 2}); // -x on yz plane + + faces_.push_back({3, 7, 8, 4}); // +y on xz plane = top + faces_.push_back({1, 2, 6, 5}); // -y on xz plane + + faces_.push_back({2, 4, 8, 6}); // +z on xy plane = front + faces_.push_back({1, 5, 7, 3}); // -z on xy plane + + // Add the edges in the cube, they are sorted according to axis (x-y-z). 
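+  // With the 1-based vertex numbering above, the four edges parallel to the
+  // x axis connect vertices whose indices differ by 4, the four y edges
+  // connect indices differing by 2, and the four z edges differ by 1.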
+  edges_.push_back({1, 5});
+  edges_.push_back({2, 6});
+  edges_.push_back({3, 7});
+  edges_.push_back({4, 8});
+
+  edges_.push_back({1, 3});
+  edges_.push_back({5, 7});
+  edges_.push_back({2, 4});
+  edges_.push_back({6, 8});
+
+  edges_.push_back({1, 2});
+  edges_.push_back({3, 4});
+  edges_.push_back({5, 6});
+  edges_.push_back({7, 8});
+  Update();
+}
+
+void Box::Update() {
+  // Compute the eight vertices of the bounding box from Box's parameters
+  auto w = scale_[0] / 2.f;
+  auto h = scale_[1] / 2.f;
+  auto d = scale_[2] / 2.f;
+
+  // Define the local coordinate system, w.r.t. the center of the box
+  bounding_box_[0] << 0., 0., 0.;
+  bounding_box_[1] << -w, -h, -d;
+  bounding_box_[2] << -w, -h, +d;
+  bounding_box_[3] << -w, +h, -d;
+  bounding_box_[4] << -w, +h, +d;
+  bounding_box_[5] << +w, -h, -d;
+  bounding_box_[6] << +w, -h, +d;
+  bounding_box_[7] << +w, +h, -d;
+  bounding_box_[8] << +w, +h, +d;
+
+  // Convert to world coordinate system
+  for (int i = 0; i < kNumKeypoints; ++i) {
+    bounding_box_[i] =
+        transformation_.topLeftCorner<3, 3>() * bounding_box_[i] +
+        transformation_.col(3).head<3>();
+  }
+}
+
+void Box::Adjust(const std::vector& variables) {
+  Eigen::Vector3f translation;
+  translation << variables[0], variables[1], variables[2];
+  SetTranslation(translation);
+
+  const float roll = variables[3];
+  const float pitch = variables[4];
+  const float yaw = variables[5];
+  SetRotation(roll, pitch, yaw);
+
+  Eigen::Vector3f scale;
+  scale << variables[6], variables[7], variables[8];
+
+  SetScale(scale);
+  Update();
+}
+
+float* Box::GetVertex(size_t vertex_id) {
+  CHECK_LT(vertex_id, kNumKeypoints);
+  return bounding_box_[vertex_id].data();
+}
+
+const float* Box::GetVertex(size_t vertex_id) const {
+  CHECK_LT(vertex_id, kNumKeypoints);
+  return bounding_box_[vertex_id].data();
+}
+
+bool Box::InsideTest(const Eigen::Vector3f& point, int check_axis) const {
+  const float* v0 = GetVertex(1);
+  const float* v1 = GetVertex(2);
+  const float* v2 = GetVertex(3);
+  const float* v4 = GetVertex(5);
+
+  switch (check_axis) {
+    case 1:
+      return (v0[0] <= point[0] && point[0] <= v1[0]);  // X-axis
+    case 2:
+      return (v0[1] <= point[1] && point[1] <= v2[1]);  // Y-axis
+    case 3:
+      return (v0[2] <= point[2] && point[2] <= v4[2]);  // Z-axis
+    default:
+      return false;
+  }
+}
+
+void Box::Deserialize(const Object& obj) {
+  CHECK_EQ(obj.keypoints_size(), kNumKeypoints);
+  Model::Deserialize(obj);
+}
+
+void Box::Serialize(Object* obj) {
+  Model::Serialize(obj);
+  obj->set_type(Object::BOUNDING_BOX);
+  std::vector local_bounding_box(9);
+  // Define the local coordinate system, w.r.t. the center of the box
+  local_bounding_box[0] << 0., 0., 0.;
+  local_bounding_box[1] << -0.5, -0.5, -0.5;
+  local_bounding_box[2] << -0.5, -0.5, +0.5;
+  local_bounding_box[3] << -0.5, +0.5, -0.5;
+  local_bounding_box[4] << -0.5, +0.5, +0.5;
+  local_bounding_box[5] << +0.5, -0.5, -0.5;
+  local_bounding_box[6] << +0.5, -0.5, +0.5;
+  local_bounding_box[7] << +0.5, +0.5, -0.5;
+  local_bounding_box[8] << +0.5, +0.5, +0.5;
+  for (int i = 0; i < kNumKeypoints; ++i) {
+    KeyPoint* keypoint = obj->add_keypoints();
+    keypoint->set_x(local_bounding_box[i][0]);
+    keypoint->set_y(local_bounding_box[i][1]);
+    keypoint->set_z(local_bounding_box[i][2]);
+    keypoint->set_confidence_radius(0.);
+  }
+}
+
+const Face& Box::GetFrontFace() const { return faces_[kFrontFaceId]; }
+
+const Face& Box::GetTopFace() const { return faces_[kTopFaceId]; }
+
+std::pair Box::GetGroundPlane() const {
+  const Vector3f gravity = Vector3f(0., 1., 0.);
+  int ground_plane_id = 0;
+  float ground_plane_error = 10.0;
+
+  auto get_face_center = [&](const Face& face) {
+    Vector3f center = Vector3f::Zero();
+    for (const int vertex_id : face) {
+      center += Map(GetVertex(vertex_id));
+    }
+    center /= face.size();
+    return center;
+  };
+
+  auto get_face_normal = [&](const Face& face, const Vector3f& center) {
+    Vector3f v1 = Map(GetVertex(face[0])) - center;
+    Vector3f v2 = Map(GetVertex(face[1])) - center;
+    Vector3f normal = v1.cross(v2);
+    return normal;
+  };
+
+  // The ground plane is defined as a plane aligned with gravity.
+  // gravity is the (0, 1, 0) vector in the world coordinate system.
+  const auto& faces = GetFaces();
+  for (int face_id = 0; face_id < faces.size(); face_id += 2) {
+    const auto& face = faces[face_id];
+    Vector3f center = get_face_center(face);
+    Vector3f normal = get_face_normal(face, center);
+    Vector3f w = gravity.cross(normal);
+    const float w_sq_norm = w.squaredNorm();
+    if (w_sq_norm < ground_plane_error) {
+      ground_plane_error = w_sq_norm;
+      ground_plane_id = face_id;
+    }
+  }
+
+  Vector3f center = get_face_center(faces[ground_plane_id]);
+  Vector3f normal = get_face_normal(faces[ground_plane_id], center);
+
+  // For each face, we also have a parallel face whose normal is also
+  // aligned with the gravity vector. We pick the face with the lower height
+  // (y-value). The parallel to face 0 is 1, face 2 is 3, and face 4 is 5.
+  int parallel_face_id = ground_plane_id + 1;
+  const auto& parallel_face = faces[parallel_face_id];
+  Vector3f parallel_face_center = get_face_center(parallel_face);
+  Vector3f parallel_face_normal =
+      get_face_normal(parallel_face, parallel_face_center);
+  if (parallel_face_center[1] < center[1]) {
+    center = parallel_face_center;
+    normal = parallel_face_normal;
+  }
+  return {center, normal};
+}
+
+template
+void Box::Fit(const std::vector& vertices) {
+  CHECK_EQ(vertices.size(), kNumKeypoints);
+  scale_.setZero();
+  // The scale would remain invariant under rotation and translation.
+  // We can safely estimate the scale from the oriented box.
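+  // Each axis has four parallel edges (grouped in the ctor), so the scale
+  // along an axis is estimated as the mean length of those four edges.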
+ for (int axis = 0; axis < kNumberOfAxis; ++axis) { + for (int edge_id = 0; edge_id < kEdgesPerAxis; ++edge_id) { + // The edges are stored in quadruples according to each axis + const std::array& edge = edges_[axis * kEdgesPerAxis + edge_id]; + scale_[axis] += (vertices[edge[0]] - vertices[edge[1]]).norm(); + } + scale_[axis] /= kEdgesPerAxis; + } + // Create a scaled axis-aligned box + transformation_.setIdentity(); + Update(); + + using MatrixN3_RM = Eigen::Matrix; + Eigen::Map v(vertices[0].data()); + Eigen::Map system(bounding_box_[0].data()); + auto system_h = system.rowwise().homogeneous().eval(); + auto system_g = system_h.colPivHouseholderQr(); + auto solution = system_g.solve(v).eval(); + transformation_.topLeftCorner<3, 4>() = solution.transpose(); + Update(); +} + +template void Box::Fit(const std::vector&); +template void Box::Fit>(const std::vector>&); +template void Box::Fit>( + const std::vector>&); +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/box.h b/mediapipe/modules/objectron/calculators/box.h new file mode 100644 index 0000000..17218f7 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/box.h @@ -0,0 +1,132 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_BOX_H_ +#define MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_BOX_H_ + +#include + +#include "mediapipe/modules/objectron/calculators/model.h" + +namespace mediapipe { + +// Model for the bounding box in 3D +// The box has 9 degrees of freedom, which uniquely defines 8 keypoints in the +// fixed world-coordinate system. +// +// The 8 keypoints are defined as follows +// +// kp-id axis +// 0 000 --- +// 1 001 --+ +// 2 010 -+- +// 3 011 -++ +// 4 100 +-- +// 5 101 +-+ +// 6 110 ++- +// 7 111 +++ +// +// where xyz means positive or negative vector along the axis where the center +// of the box is the origin. The resulting bounding box is +// +// x x +// 0 + + + + + + + + 4 .------- +// +\ +\ |\ +// + \ y + \ z | \ y +// + \ + \ | \ +// + 2 + + + + + + + + 6 +// z + + + + +// + + + + +// + + C + + +// + + + + +// 1 + + + + + + + + 5 + +// \ + \ + +// \ + \ + +// \+ \+ +// 3 + + + + + + + + 7 +// +// World coordinate system: +y is up (aligned with gravity), +// +z is toward the user, +x follows right hand rule. +// The front face is defined as +z axis on xy plane. +// The top face is defined as +y axis on xz plane. +// + +class Box : public Model { + public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW + + explicit Box(const std::string& category); + ~Box() override = default; + + bool InsideTest(const Vector3f& point, int check_axis) const; + + const std::vector& GetFaces() const { return faces_; } + const Face& GetFace(size_t face_id) const { return faces_[face_id]; } + + const std::vector>& GetEdges() const { return edges_; } + const std::array& GetEdge(size_t edge_id) const { + return edges_[edge_id]; + } + + // Returns the keypoints for the front face of the box. 
+  // The front face is defined as the face with the +z normal vector on the
+  // xy plane. In Box's c'tor, the front face is set to {2, 4, 8, 6}.
+  const Face& GetFrontFace() const;
+
+  // Returns the keypoints for the top face of the box.
+  // The top face is defined as the face with the +y normal vector on the
+  // xz plane. In Box's c'tor, the top face is set to {3, 7, 8, 4}.
+  const Face& GetTopFace() const;
+
+  void Update() override;
+  void Adjust(const std::vector& variables) override;
+  float* GetVertex(size_t vertex_id) override;
+  const float* GetVertex(size_t vertex_id) const override;
+  void Deserialize(const Object& obj) override;
+  void Serialize(Object* obj) override;
+
+  // Computes the plane center and the normal vector for the plane the object
+  // is sitting on in the world coordinate system. The normal vector is roughly
+  // aligned with gravity.
+  std::pair GetGroundPlane() const;
+
+  // Estimates the box's 9-dof parameters from the given vertices. Directly
+  // computes the scale of the box, then solves for orientation and
+  // translation. Expects a std::vector of size 9 of Eigen::Vector3f or mapped
+  // Vector3f elements. If mapping proto messages, we recommend using the Map
+  // type. For example:
+  //
+  //   using T = Map;
+  //   std::vector vertices;
+  //   for (const auto& point : message) { // point is a repeated float message.
+  //     T p(point.data());
+  //     vertices.emplace_back(p);
+  //   }
+  //   box.Fit(vertices);
+  //
+  // The points must be arranged as a 1 + 8 (center keypoint followed by the 8
+  // box vertices) vector. This function will overwrite the scale and
+  // transformation properties of the class.
+  template >
+  void Fit(const std::vector& vertices);
+
+ private:
+  std::vector faces_;
+  std::vector> edges_;
+  std::vector bounding_box_;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_BOX_H_
diff --git a/mediapipe/modules/objectron/calculators/box_util.cc b/mediapipe/modules/objectron/calculators/box_util.cc
new file mode 100644
index 0000000..0663b5b
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/box_util.cc
@@ -0,0 +1,153 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
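As a rough sketch of how the utilities in this file compose (the wrapper function and sample values below are illustrative and not part of this diff; the declarations themselves appear in box_util.h further down):

```cpp
#include <vector>

#include "mediapipe/modules/objectron/calculators/box_util.h"

// Hypothetical illustration: fit an axis-aligned box around tracked points,
// then measure its overlap with a rotated copy of itself.
float ExampleIoU() {
  std::vector<cv::Point2f> points = {
      {0.2f, 0.3f}, {0.4f, 0.5f}, {0.3f, 0.2f}};  // normalized coordinates
  mediapipe::TimedBoxProto box;
  mediapipe::ComputeBoundingRect(points, &box);  // fills top/bottom/left/right

  mediapipe::TimedBoxProto rotated = box;
  rotated.set_rotation(0.5f);  // rotation is in radians
  return mediapipe::ComputeBoxIoU(box, rotated);
}
```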
+ +#include "mediapipe/modules/objectron/calculators/box_util.h" + +#include + +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/opencv_core_inc.h" +#include "mediapipe/framework/port/opencv_imgproc_inc.h" +#include "mediapipe/util/tracking/box_tracker.pb.h" + +namespace mediapipe { +void ComputeBoundingRect(const std::vector& points, + mediapipe::TimedBoxProto* box) { + CHECK(box != nullptr); + float top = 1.0f; + float bottom = 0.0f; + float left = 1.0f; + float right = 0.0f; + for (const auto& point : points) { + top = std::min(top, point.y); + bottom = std::max(bottom, point.y); + left = std::min(left, point.x); + right = std::max(right, point.x); + } + box->set_top(top); + box->set_bottom(bottom); + box->set_left(left); + box->set_right(right); + // We are currently only doing axis aligned bounding box. If we need to + // compute rotated bounding box, then we need the original image aspect ratio, + // map back to original image space, compute cv::convexHull, then for each + // edge of the hull, rotate according to edge orientation, find the box. + box->set_rotation(0.0f); +} + +float ComputeBoxIoU(const TimedBoxProto& box1, const TimedBoxProto& box2) { + cv::Point2f box1_center((box1.left() + box1.right()) * 0.5f, + (box1.top() + box1.bottom()) * 0.5f); + cv::Size2f box1_size(box1.right() - box1.left(), box1.bottom() - box1.top()); + cv::RotatedRect rect1(box1_center, box1_size, + -box1.rotation() * 180.0f / M_PI); + cv::Point2f box2_center((box2.left() + box2.right()) * 0.5f, + (box2.top() + box2.bottom()) * 0.5f); + cv::Size2f box2_size(box2.right() - box2.left(), box2.bottom() - box2.top()); + cv::RotatedRect rect2(box2_center, box2_size, + -box2.rotation() * 180.0f / M_PI); + std::vector intersections_unsorted; + std::vector intersections; + cv::rotatedRectangleIntersection(rect1, rect2, intersections_unsorted); + if (intersections_unsorted.size() < 3) { + return 0.0f; + } + cv::convexHull(intersections_unsorted, intersections); + + // We use Shoelace formula to compute area of polygons. + float intersection_area = 0.0f; + for (int i = 0; i < intersections.size(); ++i) { + const auto& curr_pt = intersections[i]; + const int i_next = (i + 1) == intersections.size() ? 0 : (i + 1); + const auto& next_pt = intersections[i_next]; + intersection_area += (curr_pt.x * next_pt.y - next_pt.x * curr_pt.y); + } + intersection_area = std::abs(intersection_area) * 0.5f; + + // Compute union area + const float union_area = + rect1.size.area() + rect2.size.area() - intersection_area + 1e-5f; + + const float iou = intersection_area / union_area; + return iou; +} + +std::vector ComputeBoxCorners(const TimedBoxProto& box, + float width, float height) { + // Rotate 4 corner w.r.t. center. + const cv::Point2f center(0.5f * (box.left() + box.right()) * width, + 0.5f * (box.top() + box.bottom()) * height); + const std::vector corners{ + cv::Point2f(box.left() * width, box.top() * height), + cv::Point2f(box.left() * width, box.bottom() * height), + cv::Point2f(box.right() * width, box.bottom() * height), + cv::Point2f(box.right() * width, box.top() * height)}; + + const float cos_a = std::cos(box.rotation()); + const float sin_a = std::sin(box.rotation()); + std::vector transformed_corners(4); + for (int k = 0; k < 4; ++k) { + // Scale and rotate w.r.t. center. 
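+    // The next lines apply the 2D rotation matrix [cos -sin; sin cos] to the
+    // corner's offset from the center, then map the result back to
+    // normalized coordinates.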
+ const cv::Point2f rad = corners[k] - center; + const cv::Point2f rot_rad(cos_a * rad.x - sin_a * rad.y, + sin_a * rad.x + cos_a * rad.y); + transformed_corners[k] = center + rot_rad; + transformed_corners[k].x /= width; + transformed_corners[k].y /= height; + } + return transformed_corners; +} + +cv::Mat PerspectiveTransformBetweenBoxes(const TimedBoxProto& src_box, + const TimedBoxProto& dst_box, + const float aspect_ratio) { + std::vector box1_corners = + ComputeBoxCorners(src_box, /*width*/ aspect_ratio, /*height*/ 1.0f); + std::vector box2_corners = + ComputeBoxCorners(dst_box, /*width*/ aspect_ratio, /*height*/ 1.0f); + cv::Mat affine_transform = cv::getPerspectiveTransform( + /*src*/ box1_corners, /*dst*/ box2_corners); + cv::Mat output_affine; + affine_transform.convertTo(output_affine, CV_32FC1); + return output_affine; +} + +cv::Point2f MapPoint(const TimedBoxProto& src_box, const TimedBoxProto& dst_box, + const cv::Point2f& src_point, float width, float height) { + const cv::Point2f src_center( + 0.5f * (src_box.left() + src_box.right()) * width, + 0.5f * (src_box.top() + src_box.bottom()) * height); + const cv::Point2f dst_center( + 0.5f * (dst_box.left() + dst_box.right()) * width, + 0.5f * (dst_box.top() + dst_box.bottom()) * height); + const float scale_x = + (dst_box.right() - dst_box.left()) / (src_box.right() - src_box.left()); + const float scale_y = + (dst_box.bottom() - dst_box.top()) / (src_box.bottom() - src_box.top()); + const float rotation = dst_box.rotation() - src_box.rotation(); + const cv::Point2f rad = + cv::Point2f(src_point.x * width, src_point.y * height) - src_center; + const float rad_x = rad.x * scale_x; + const float rad_y = rad.y * scale_y; + const float cos_a = std::cos(rotation); + const float sin_a = std::sin(rotation); + const cv::Point2f rot_rad(cos_a * rad_x - sin_a * rad_y, + sin_a * rad_x + cos_a * rad_y); + const cv::Point2f dst_point_image = dst_center + rot_rad; + const cv::Point2f dst_point(dst_point_image.x / width, + dst_point_image.y / height); + return dst_point; +} + +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/box_util.h b/mediapipe/modules/objectron/calculators/box_util.h new file mode 100644 index 0000000..fed21c0 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/box_util.h @@ -0,0 +1,50 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_BOX_UTIL_H_ +#define MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_BOX_UTIL_H_ + +#include "mediapipe/framework/port/opencv_core_inc.h" +#include "mediapipe/util/tracking/box_tracker.pb.h" + +namespace mediapipe { + +// This function fills the geometry of the TimedBoxProto. Id, timestamp etc. +// need to be set outside this function. +void ComputeBoundingRect(const std::vector& points, + mediapipe::TimedBoxProto* box); + +// This function computes the intersection over union between two boxes. 
+float ComputeBoxIoU(const TimedBoxProto& box1, const TimedBoxProto& box2); + +// Computes corners of the box. +// width and height are image width and height, which is typically +// needed since the box is in normalized coordinates. +std::vector ComputeBoxCorners(const TimedBoxProto& box, + float width, float height); + +// Computes the perspective transform from box1 to box2. +// The input argument aspect_ratio is width / height of the image. +// The returned matrix should be a 3x3 matrix. +cv::Mat PerspectiveTransformBetweenBoxes(const TimedBoxProto& src_box, + const TimedBoxProto& dst_box, + const float aspect_ratio); + +// Map point according to source and destination box location. +cv::Point2f MapPoint(const TimedBoxProto& src_box, const TimedBoxProto& dst_box, + const cv::Point2f& src_point, float width, float height); + +} // namespace mediapipe + +#endif // MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_BOX_UTIL_H_ diff --git a/mediapipe/modules/objectron/calculators/box_util_test.cc b/mediapipe/modules/objectron/calculators/box_util_test.cc new file mode 100644 index 0000000..2a3895f --- /dev/null +++ b/mediapipe/modules/objectron/calculators/box_util_test.cc @@ -0,0 +1,123 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "mediapipe/modules/objectron/calculators/box_util.h" + +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/opencv_core_inc.h" +#include "mediapipe/util/tracking/box_tracker.pb.h" + +namespace mediapipe { +namespace { + +TEST(BoxUtilTest, TestComputeBoundingRect) { + std::vector points{ + cv::Point2f(0.35f, 0.25f), cv::Point2f(0.3f, 0.3f), + cv::Point2f(0.2f, 0.4f), cv::Point2f(0.3f, 0.1f), + cv::Point2f(0.2f, 0.2f), cv::Point2f(0.5f, 0.3f), + cv::Point2f(0.4f, 0.4f), cv::Point2f(0.5f, 0.1f), + cv::Point2f(0.4f, 0.2f)}; + TimedBoxProto box; + ComputeBoundingRect(points, &box); + EXPECT_FLOAT_EQ(0.1f, box.top()); + EXPECT_FLOAT_EQ(0.4f, box.bottom()); + EXPECT_FLOAT_EQ(0.2f, box.left()); + EXPECT_FLOAT_EQ(0.5f, box.right()); +} + +TEST(BoxUtilTest, TestComputeBoxIoU) { + TimedBoxProto box1; + box1.set_top(0.2f); + box1.set_bottom(0.6f); + box1.set_left(0.1f); + box1.set_right(0.3f); + box1.set_rotation(0.0f); + TimedBoxProto box2 = box1; + box2.set_rotation(/*pi/2*/ 1.570796f); + const float box_area = + (box1.bottom() - box1.top()) * (box1.right() - box1.left()); + const float box_intersection = + (box1.right() - box1.left()) * (box1.right() - box1.left()); + const float expected_iou = + box_intersection / (box_area * 2 - box_intersection); + EXPECT_NEAR(expected_iou, ComputeBoxIoU(box1, box2), 3e-5f); + + TimedBoxProto box3; + box3.set_top(0.2f); + box3.set_bottom(0.6f); + box3.set_left(0.5f); + box3.set_right(0.7f); + EXPECT_NEAR(0.0f, ComputeBoxIoU(box1, box3), 3e-5f); +} + +TEST(BoxUtilTest, TestPerspectiveTransformBetweenBoxes) { + TimedBoxProto box1; + const float height = 4.0f; + const float width = 3.0f; + box1.set_top(1.0f / height); + box1.set_bottom(2.0f / height); + box1.set_left(1.0f / width); + box1.set_right(2.0f / width); + TimedBoxProto box2; + box2.set_top(1.0f / height); + box2.set_bottom(2.0f / height); + box2.set_left(1.0f / width); + box2.set_right(2.0f / width); + box2.set_rotation(/*pi/4*/ -0.785398f); + cv::Mat transform = + PerspectiveTransformBetweenBoxes(box1, box2, width / height); + const float kTolerence = 1e-5f; + const cv::Vec3f original_position(1.5f / width, 1.0f / height, 1.0f); + const cv::Mat transformed_position = transform * cv::Mat(original_position); + EXPECT_NEAR( + (1.5f - 0.5f * std::sqrt(2) / 2.0f) / width, + transformed_position.at(0) / transformed_position.at(2), + kTolerence); + EXPECT_NEAR( + (1.5f - 0.5f * std::sqrt(2) / 2.0f) / height, + transformed_position.at(1) / transformed_position.at(2), + kTolerence); +} + +TEST(BoxUtilTest, TestMapPoint) { + const float height = 4.0f; + const float width = 3.0f; + TimedBoxProto box1; + box1.set_top(1.0f / height); + box1.set_bottom(2.0f / height); + box1.set_left(1.0f / width); + box1.set_right(2.0f / width); + TimedBoxProto box2; + box2.set_top(1.0f / height); + box2.set_bottom(2.0f / height); + box2.set_left(1.0f / width); + box2.set_right(2.0f / width); + box2.set_rotation(/*pi/4*/ -0.785398f); + + cv::Point2f src_point1(1.2f / width, 1.4f / height); + cv::Point2f src_point2(1.3f / width, 1.8f / height); + const float distance1 = std::sqrt(0.1 * 0.1 + 0.4 * 0.4); + cv::Point2f dst_point1 = MapPoint(box1, box2, src_point1, width, height); + cv::Point2f dst_point2 = MapPoint(box1, box2, src_point2, width, height); + const float distance2 = + std::sqrt((dst_point1.x * width - dst_point2.x * width) * + (dst_point1.x * width - dst_point2.x * width) + + (dst_point1.y * height - dst_point2.y * height) * + 
(dst_point1.y * height - dst_point2.y * height)); + EXPECT_NEAR(distance1, distance2, 1e-5f); +} + +} // namespace +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/camera_parameters.proto b/mediapipe/modules/objectron/calculators/camera_parameters.proto new file mode 100644 index 0000000..f5c843b --- /dev/null +++ b/mediapipe/modules/objectron/calculators/camera_parameters.proto @@ -0,0 +1,47 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +message CameraParametersProto { + // This number is non-negative, it represents camera height above ground + // normalized by focal length. + optional float height_above_ground = 1 [default = 100.0]; + // Width of image in portrait orientation normalized by focal length + optional float portrait_width = 2 [default = 1.0103]; + // Height of image in portrait orientation normalized by focal length + optional float portrait_height = 3 [default = 1.3435]; + enum ImageOrientation { + PORTRAIT_ORIENTATION = 0; + LANDSCAPE_ORIENTATION = 1; + } + // The input image orientation + optional ImageOrientation image_orientation = 4 + [default = PORTRAIT_ORIENTATION]; + + // This defines the projection method from 2D screen to 3D. + enum ProjectionMode { + UNSPECIFIED = 0; + // Projects 2D point to ground plane (horizontal plane). + GROUND_PLANE = 1; + // Projects 2D point to sphere. + SPHERE = 2; + } + optional ProjectionMode projection_mode = 5 [default = GROUND_PLANE]; + // Radius of sphere when using the SPHERE projection mode above. + // The value is normalized by focal length. + optional float projection_sphere_radius = 6 [default = 100.0]; +} diff --git a/mediapipe/modules/objectron/calculators/decoder.cc b/mediapipe/modules/objectron/calculators/decoder.cc new file mode 100644 index 0000000..0af3458 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/decoder.cc @@ -0,0 +1,252 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
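Before the implementation, a minimal usage sketch of the decoder API declared in decoder.h further below. The driver function, its inputs, and the config values are illustrative, not part of this diff; the 4x4 row-major float projection matrix is the type Lift2DTo3D expects:

```cpp
#include "Eigen/Dense"
#include "mediapipe/modules/objectron/calculators/belief_decoder_config.pb.h"
#include "mediapipe/modules/objectron/calculators/decoder.h"

// Hypothetical driver: decode 2D box vertices from the network's output
// maps, then lift them to 3D with EPnP.
mediapipe::FrameAnnotation DecodeAndLift(
    const cv::Mat& heatmap,    // single-channel center-point heatmap
    const cv::Mat& offsetmap,  // 16-channel vertex offset maps
    const Eigen::Matrix<float, 4, 4, Eigen::RowMajor>& projection_matrix) {
  mediapipe::BeliefDecoderConfig config;
  config.set_heatmap_threshold(0.6f);  // illustrative values
  config.set_local_max_distance(5.0f);

  mediapipe::Decoder decoder(config);
  mediapipe::FrameAnnotation boxes =
      decoder.DecodeBoundingBoxKeypoints(heatmap, offsetmap);

  // On failure the annotation simply keeps only its 2D keypoints.
  decoder.Lift2DTo3D(projection_matrix, /*portrait=*/false, &boxes)
      .IgnoreError();
  return boxes;
}
```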
+ +#include "mediapipe/modules/objectron/calculators/decoder.h" + +#include +#include + +#include "Eigen/Core" +#include "Eigen/Dense" +#include "absl/status/status.h" +#include "mediapipe/framework/port/canonical_errors.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/opencv_imgproc_inc.h" +#include "mediapipe/framework/port/status.h" +#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h" +#include "mediapipe/modules/objectron/calculators/box.h" +#include "mediapipe/modules/objectron/calculators/epnp.h" +#include "mediapipe/modules/objectron/calculators/types.h" + +namespace mediapipe { + +constexpr int Decoder::kNumOffsetmaps = 16; +constexpr int kNumKeypoints = 9; + +namespace { + +inline void SetPoint3d(const Eigen::Vector3f& point_vec, Point3D* point_3d) { + point_3d->set_x(point_vec.x()); + point_3d->set_y(point_vec.y()); + point_3d->set_z(point_vec.z()); +} + +} // namespace + +FrameAnnotation Decoder::DecodeBoundingBoxKeypoints( + const cv::Mat& heatmap, const cv::Mat& offsetmap) const { + CHECK_EQ(1, heatmap.channels()); + CHECK_EQ(kNumOffsetmaps, offsetmap.channels()); + CHECK_EQ(heatmap.cols, offsetmap.cols); + CHECK_EQ(heatmap.rows, offsetmap.rows); + + const float offset_scale = std::min(offsetmap.cols, offsetmap.rows); + const std::vector center_points = ExtractCenterKeypoints(heatmap); + std::vector boxes; + for (const auto& center_point : center_points) { + BeliefBox box; + box.box_2d.emplace_back(center_point.x, center_point.y); + const int center_x = static_cast(std::round(center_point.x)); + const int center_y = static_cast(std::round(center_point.y)); + box.belief = heatmap.at(center_y, center_x); + if (config_.voting_radius() > 1) { + DecodeByVoting(heatmap, offsetmap, center_x, center_y, offset_scale, + offset_scale, &box); + } else { + DecodeByPeak(offsetmap, center_x, center_y, offset_scale, offset_scale, + &box); + } + if (IsNewBox(&boxes, &box)) { + boxes.push_back(std::move(box)); + } + } + + const float x_scale = 1.0f / offsetmap.cols; + const float y_scale = 1.0f / offsetmap.rows; + FrameAnnotation frame_annotations; + for (const auto& box : boxes) { + auto* object = frame_annotations.add_annotations(); + for (const auto& point : box.box_2d) { + auto* point2d = object->add_keypoints()->mutable_point_2d(); + point2d->set_x(point.first * x_scale); + point2d->set_y(point.second * y_scale); + } + } + return frame_annotations; +} + +void Decoder::DecodeByPeak(const cv::Mat& offsetmap, int center_x, int center_y, + float offset_scale_x, float offset_scale_y, + BeliefBox* box) const { + const auto& offset = offsetmap.at>( + /*row*/ center_y, /*col*/ center_x); + for (int i = 0; i < kNumOffsetmaps / 2; ++i) { + const float x_offset = offset[2 * i] * offset_scale_x; + const float y_offset = offset[2 * i + 1] * offset_scale_y; + box->box_2d.emplace_back(center_x + x_offset, center_y + y_offset); + } +} + +void Decoder::DecodeByVoting(const cv::Mat& heatmap, const cv::Mat& offsetmap, + int center_x, int center_y, float offset_scale_x, + float offset_scale_y, BeliefBox* box) const { + // Votes at the center. + const auto& center_offset = offsetmap.at>( + /*row*/ center_y, /*col*/ center_x); + std::vector center_votes(kNumOffsetmaps, 0.f); + for (int i = 0; i < kNumOffsetmaps / 2; ++i) { + center_votes[2 * i] = center_x + center_offset[2 * i] * offset_scale_x; + center_votes[2 * i + 1] = + center_y + center_offset[2 * i + 1] * offset_scale_y; + } + + // Find voting window. 
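+  // The window is a square of side 2 * voting_radius + 1 centered at the
+  // peak, clipped to the heatmap bounds.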
+ int x_min = std::max(0, center_x - config_.voting_radius()); + int y_min = std::max(0, center_y - config_.voting_radius()); + int width = std::min(heatmap.cols - x_min, config_.voting_radius() * 2 + 1); + int height = std::min(heatmap.rows - y_min, config_.voting_radius() * 2 + 1); + cv::Rect rect(x_min, y_min, width, height); + cv::Mat heat = heatmap(rect); + cv::Mat offset = offsetmap(rect); + + for (int i = 0; i < kNumOffsetmaps / 2; ++i) { + float x_sum = 0.f; + float y_sum = 0.f; + float votes = 0.f; + for (int r = 0; r < heat.rows; ++r) { + for (int c = 0; c < heat.cols; ++c) { + const float belief = heat.at(r, c); + if (belief < config_.voting_threshold()) { + continue; + } + float offset_x = + offset.at>(r, c)[2 * i] * + offset_scale_x; + float offset_y = + offset.at>(r, c)[2 * i + 1] * + offset_scale_y; + float vote_x = c + rect.x + offset_x; + float vote_y = r + rect.y + offset_y; + float x_diff = std::abs(vote_x - center_votes[2 * i]); + float y_diff = std::abs(vote_y - center_votes[2 * i + 1]); + if (x_diff > config_.voting_allowance() || + y_diff > config_.voting_allowance()) { + continue; + } + x_sum += vote_x * belief; + y_sum += vote_y * belief; + votes += belief; + } + } + box->box_2d.emplace_back(x_sum / votes, y_sum / votes); + } +} + +bool Decoder::IsNewBox(std::vector* boxes, BeliefBox* box) const { + for (auto& b : *boxes) { + if (IsIdentical(b, *box)) { + if (b.belief < box->belief) { + std::swap(b, *box); + } + return false; + } + } + return true; +} + +bool Decoder::IsIdentical(const BeliefBox& box_1, + const BeliefBox& box_2) const { + // Skip the center point. + for (int i = 1; i < box_1.box_2d.size(); ++i) { + const float x_diff = + std::abs(box_1.box_2d[i].first - box_2.box_2d[i].first); + const float y_diff = + std::abs(box_1.box_2d[i].second - box_2.box_2d[i].second); + if (x_diff > config_.voting_allowance() || + y_diff > config_.voting_allowance()) { + return false; + } + } + return true; +} + +std::vector Decoder::ExtractCenterKeypoints( + const cv::Mat& center_heatmap) const { + cv::Mat max_filtered_heatmap(center_heatmap.rows, center_heatmap.cols, + center_heatmap.type()); + const int kernel_size = + static_cast(config_.local_max_distance() * 2 + 1 + 0.5f); + const cv::Size morph_size(kernel_size, kernel_size); + cv::dilate(center_heatmap, max_filtered_heatmap, + cv::getStructuringElement(cv::MORPH_RECT, morph_size)); + cv::Mat peak_map; + cv::bitwise_and((center_heatmap >= max_filtered_heatmap), + (center_heatmap >= config_.heatmap_threshold()), peak_map); + std::vector locations; // output, locations of non-zero pixels + cv::findNonZero(peak_map, locations); + return locations; +} + +absl::Status Decoder::Lift2DTo3D( + const Eigen::Matrix& projection_matrix, + bool portrait, FrameAnnotation* estimated_box) const { + CHECK(estimated_box != nullptr); + + for (auto& annotation : *estimated_box->mutable_annotations()) { + CHECK_EQ(kNumKeypoints, annotation.keypoints_size()); + + // Fill input 2D Points; + std::vector input_points_2d; + input_points_2d.reserve(kNumKeypoints); + for (const auto& keypoint : annotation.keypoints()) { + input_points_2d.emplace_back(keypoint.point_2d().x(), + keypoint.point_2d().y()); + } + + // Run EPnP. 
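+    // SolveEpnp recovers the 9 camera-frame points only up to a global
+    // scale; the box fit below extracts rotation, translation, and scale
+    // from them.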
+ std::vector output_points_3d; + output_points_3d.reserve(kNumKeypoints); + auto status = SolveEpnp(projection_matrix, portrait, input_points_2d, + &output_points_3d); + if (!status.ok()) { + LOG(ERROR) << status; + return status; + } + + // Fill 3D keypoints; + for (int i = 0; i < kNumKeypoints; ++i) { + SetPoint3d(output_points_3d[i], + annotation.mutable_keypoints(i)->mutable_point_3d()); + } + + // Fit a box to the 3D points to get box scale, rotation, translation. + Box box("category"); + box.Fit(output_points_3d); + const Eigen::Matrix rotation = + box.GetRotation(); + const Eigen::Vector3f translation = box.GetTranslation(); + const Eigen::Vector3f scale = box.GetScale(); + // Fill box rotation. + *annotation.mutable_rotation() = {rotation.data(), + rotation.data() + rotation.size()}; + // Fill box translation. + *annotation.mutable_translation() = { + translation.data(), translation.data() + translation.size()}; + // Fill box scale. + *annotation.mutable_scale() = {scale.data(), scale.data() + scale.size()}; + } + return absl::OkStatus(); +} + +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/decoder.h b/mediapipe/modules/objectron/calculators/decoder.h new file mode 100644 index 0000000..be69939 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/decoder.h @@ -0,0 +1,109 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_DECODER_H_ +#define MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_DECODER_H_ + +#include + +#include "Eigen/Dense" +#include "absl/status/status.h" +#include "mediapipe/framework/port/opencv_core_inc.h" +#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h" +#include "mediapipe/modules/objectron/calculators/belief_decoder_config.pb.h" + +namespace mediapipe { + +// Decodes 3D bounding box from heatmaps and offset maps. In the future, +// if we want to develop decoder for generic skeleton, then we need to +// generalize this class, and make a few child classes. +class Decoder { + public: + static const int kNumOffsetmaps; + + explicit Decoder(const BeliefDecoderConfig& config) : config_(config) { + epnp_alpha_ << 4.0f, -1.0f, -1.0f, -1.0f, 2.0f, -1.0f, -1.0f, 1.0f, 2.0f, + -1.0f, 1.0f, -1.0f, 0.0f, -1.0f, 1.0f, 1.0f, 2.0f, 1.0f, -1.0f, -1.0f, + 0.0f, 1.0f, -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, -1.0f, -2.0f, 1.0f, 1.0f, + 1.0f; + } + + // Decodes bounding boxes from predicted heatmap and offset maps. + // Input: + // heatmap: a single channel cv::Mat representing center point heatmap + // offsetmap: a 16 channel cv::Mat representing the 16 offset maps + // (2 for each of the 8 vertices) + // Output: + // Outputs 3D bounding boxes 2D vertices, represented by 'point_2d' field + // in each 'keypoints' field of object annotations. + FrameAnnotation DecodeBoundingBoxKeypoints(const cv::Mat& heatmap, + const cv::Mat& offsetmap) const; + + // Lifts the estimated 2D projections of bounding box vertices to 3D. 
+  // This function uses the EPnP approach described in this paper:
+  // https://icwww.epfl.ch/~lepetit/papers/lepetit_ijcv08.pdf .
+  // Input:
+  //     projection_matrix: the projection matrix from 3D coordinate
+  //       to screen coordinate.
+  //       The 2D screen coordinate is defined as: u is along the long
+  //       edge of the device, pointing down; v is along the short edge
+  //       of the device, pointing right.
+  //     portrait: a boolean variable indicating whether our images are
+  //       obtained in portrait orientation or not.
+  //     estimated_box: annotation with point_2d field populated with
+  //       2d vertices.
+  // Output:
+  //     estimated_box: annotation with point_3d field populated with
+  //       3d vertices.
+  absl::Status Lift2DTo3D(
+      const Eigen::Matrix& projection_matrix,
+      bool portrait, FrameAnnotation* estimated_box) const;
+
+ private:
+  struct BeliefBox {
+    float belief;
+    std::vector> box_2d;
+  };
+
+  std::vector ExtractCenterKeypoints(
+      const cv::Mat& center_heatmap) const;
+
+  // Decodes 2D keypoints at the peak point.
+  void DecodeByPeak(const cv::Mat& offsetmap, int center_x, int center_y,
+                    float offset_scale_x, float offset_scale_y,
+                    BeliefBox* box) const;
+
+  // Decodes 2D keypoints by voting around the peak.
+  void DecodeByVoting(const cv::Mat& heatmap, const cv::Mat& offsetmap,
+                      int center_x, int center_y, float offset_scale_x,
+                      float offset_scale_y, BeliefBox* box) const;
+
+  // Returns true if it is a new box. Otherwise, it may replace an existing box
+  // if the new box's belief is higher.
+  bool IsNewBox(std::vector* boxes, BeliefBox* box) const;
+
+  // Returns true if the two boxes are identical.
+  bool IsIdentical(const BeliefBox& box_1, const BeliefBox& box_2) const;
+
+  BeliefDecoderConfig config_;
+  // Following equation (1) in this paper
+  // https://icwww.epfl.ch/~lepetit/papers/lepetit_ijcv08.pdf,
+  // this variable denotes the coefficients for the 4 control points
+  // for each of the 8 3D box vertices.
+  Eigen::Matrix epnp_alpha_;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_DECODER_H_
diff --git a/mediapipe/modules/objectron/calculators/epnp.cc b/mediapipe/modules/objectron/calculators/epnp.cc
new file mode 100644
index 0000000..8bd7151
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/epnp.cc
@@ -0,0 +1,167 @@
+// Copyright 2021 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/modules/objectron/calculators/epnp.h"
+
+namespace mediapipe {
+
+namespace {
+
+// Number of keypoints.
+constexpr int kNumKeypoints = 9;
+
+using Eigen::Map;
+using Eigen::Matrix;
+using Eigen::Matrix4f;
+using Eigen::Vector2f;
+using Eigen::Vector3f;
+
+}  // namespace
+
+absl::Status SolveEpnp(const float focal_x, const float focal_y,
+                       const float center_x, const float center_y,
+                       const bool portrait,
+                       const std::vector& input_points_2d,
+                       std::vector* output_points_3d) {
+  if (input_points_2d.size() != kNumKeypoints) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat("Input must has %d 2D points.", kNumKeypoints));
+  }
+
+  if (output_points_3d == nullptr) {
+    return absl::InvalidArgumentError(
+        "Output pointer output_points_3d is Null.");
+  }
+
+  Matrix m =
+      Matrix::Zero();
+
+  Matrix epnp_alpha;
+  // The epnp_alpha is the Nx4 weight matrix from the EPnP paper, which is used
+  // to express the N box vertices as the weighted sum of 4 control points. The
+  // value of epnp_alpha is dependent on the set of control points being used.
+  // In our case we used the 4 control points below (coordinates are in the
+  // world coordinate system):
+  //   c0 = (0.0, 0.0, 0.0)  // Box center
+  //   c1 = (1.0, 0.0, 0.0)  // Right face center
+  //   c2 = (0.0, 1.0, 0.0)  // Top face center
+  //   c3 = (0.0, 0.0, 1.0)  // Front face center
+  //
+  //       3 + + + + + + + + 7
+  //       +\                +\          UP
+  //       + \               + \
+  //       +  \              +  \        |
+  //       +   4 + + + + + + + + 8       | y
+  //       +   +             +   +       |
+  //       +   +             +   +       |
+  //       +   +     (0)     +   +       .------- x
+  //       +   +             +   +        \
+  //       1 + + + + + + + + 5   +         \
+  //        \  +              \  +          \ z
+  //         \ +               \ +           \
+  //          \+                \+
+  //           2 + + + + + + + + 6
+  //
+  // For each box vertex shown above, we have the below weighted sum
+  // expressions:
+  //   v1 = c0 - (c1 - c0) - (c2 - c0) - (c3 - c0) = 4*c0 - c1 - c2 - c3;
+  //   v2 = c0 - (c1 - c0) - (c2 - c0) + (c3 - c0) = 2*c0 - c1 - c2 + c3;
+  //   v3 = c0 - (c1 - c0) + (c2 - c0) - (c3 - c0) = 2*c0 - c1 + c2 - c3;
+  //   ...
+  // Thus we can determine the value of epnp_alpha as used below.
+  //
+  // clang-format off
+  epnp_alpha <<  4.0f, -1.0f, -1.0f, -1.0f,
+                 2.0f, -1.0f, -1.0f,  1.0f,
+                 2.0f, -1.0f,  1.0f, -1.0f,
+                 0.0f, -1.0f,  1.0f,  1.0f,
+                 2.0f,  1.0f, -1.0f, -1.0f,
+                 0.0f,  1.0f, -1.0f,  1.0f,
+                 0.0f,  1.0f,  1.0f, -1.0f,
+                -2.0f,  1.0f,  1.0f,  1.0f;
+  // clang-format on
+
+  for (int i = 0; i < input_points_2d.size() - 1; ++i) {
+    // Skip the 0th landmark, which is the object center.
+    const auto& point_2d = input_points_2d[i + 1];
+
+    // Convert the 2d point from `pixel coordinates` to `NDC coordinates`
+    // ([-1, 1]) following the definitions in:
+    // https://google.github.io/mediapipe/solutions/objectron#ndc-space
+    // If portrait mode is being used, it's the caller's responsibility to
+    // convert the input 2d points' coordinates.
+    float x_ndc, y_ndc;
+    if (portrait) {
+      x_ndc = point_2d.y() * 2 - 1;
+      y_ndc = point_2d.x() * 2 - 1;
+    } else {
+      x_ndc = point_2d.x() * 2 - 1;
+      y_ndc = 1 - point_2d.y() * 2;
+    }
+
+    for (int j = 0; j < 4; ++j) {
+      // For each of the 4 control points, formulate two rows of the
+      // m matrix (two equations).
+      const float control_alpha = epnp_alpha(i, j);
+      m(i * 2, j * 3) = focal_x * control_alpha;
+      m(i * 2, j * 3 + 2) = (center_x + x_ndc) * control_alpha;
+      m(i * 2 + 1, j * 3 + 1) = focal_y * control_alpha;
+      m(i * 2 + 1, j * 3 + 2) = (center_y + y_ndc) * control_alpha;
+    }
+  }
+  // This is a self-adjoint matrix. Use SelfAdjointEigenSolver for a fast
+  // and stable solution.
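+  // The stacked equations m * x = 0 are satisfied by the control points, so
+  // the solution is the eigenvector of m^T * m with the smallest eigenvalue.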
+ Matrix mt_m = m.transpose() * m; + Eigen::SelfAdjointEigenSolver> eigen_solver(mt_m); + if (eigen_solver.info() != Eigen::Success) { + return absl::AbortedError("Eigen decomposition failed."); + } + CHECK_EQ(12, eigen_solver.eigenvalues().size()); + + // Eigenvalues are sorted in increasing order for SelfAdjointEigenSolver + // only! If you use other Eigen Solvers, it's not guaranteed to be in + // increasing order. Here, we just take the eigen vector corresponding + // to first/smallest eigen value, since we used SelfAdjointEigenSolver. + Eigen::VectorXf eigen_vec = eigen_solver.eigenvectors().col(0); + Map> control_matrix(eigen_vec.data()); + + // All 3D points should be in front of camera (z < 0). + if (control_matrix(0, 2) > 0) { + control_matrix = -control_matrix; + } + Matrix vertices = epnp_alpha * control_matrix; + + // Fill 0th 3D points. + output_points_3d->emplace_back(control_matrix(0, 0), control_matrix(0, 1), + control_matrix(0, 2)); + // Fill the rest 3D points. + for (int i = 0; i < kNumKeypoints - 1; ++i) { + output_points_3d->emplace_back(vertices(i, 0), vertices(i, 1), + vertices(i, 2)); + } + return absl::OkStatus(); +} + +absl::Status SolveEpnp(const Eigen::Matrix4f& projection_matrix, + const bool portrait, + const std::vector& input_points_2d, + std::vector* output_points_3d) { + const float focal_x = projection_matrix(0, 0); + const float focal_y = projection_matrix(1, 1); + const float center_x = projection_matrix(0, 2); + const float center_y = projection_matrix(1, 2); + return SolveEpnp(focal_x, focal_y, center_x, center_y, portrait, + input_points_2d, output_points_3d); +} + +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/epnp.h b/mediapipe/modules/objectron/calculators/epnp.h new file mode 100644 index 0000000..85be6f9 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/epnp.h @@ -0,0 +1,62 @@ +// Copyright 2021 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_EPNP_H_ +#define MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_EPNP_H_ + +#include + +#include "Eigen/Dense" +#include "absl/status/status.h" +#include "absl/strings/str_format.h" +#include "mediapipe/framework/port/logging.h" + +namespace mediapipe { + +// This function performs EPnP algorithm, lifting normalized 2D points in pixel +// space to 3D points in camera coordinate. +// +// Inputs: +// focal_x: camera focal length along x. +// focal_y: camera focal length along y. +// center_x: camera center along x. +// center_y: camera center along y. +// portrait: a boolen variable indicating whether our images are obtained in +// portrait orientation or not. +// input_points_2d: input 2D points to be lifted to 3D. +// output_points_3d: ouput 3D points in camera coordinate. 
+absl::Status SolveEpnp(const float focal_x, const float focal_y, + const float center_x, const float center_y, + const bool portrait, + const std::vector& input_points_2d, + std::vector* output_points_3d); + +// This function performs EPnP algorithm, lifting normalized 2D points in pixel +// space to 3D points in camera coordinate. +// +// Inputs: +// projection_matrix: the projection matrix from 3D coordinate +// to screen coordinate. +// portrait: a boolen variable indicating whether our images are obtained in +// portrait orientation or not. +// input_points_2d: input 2D points to be lifted to 3D. +// output_points_3d: ouput 3D points in camera coordinate. +absl::Status SolveEpnp(const Eigen::Matrix4f& projection_matrix, + const bool portrait, + const std::vector& input_points_2d, + std::vector* output_points_3d); + +} // namespace mediapipe + +#endif // MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_EPNP_H_ diff --git a/mediapipe/modules/objectron/calculators/epnp_test.cc b/mediapipe/modules/objectron/calculators/epnp_test.cc new file mode 100644 index 0000000..8cf218a --- /dev/null +++ b/mediapipe/modules/objectron/calculators/epnp_test.cc @@ -0,0 +1,169 @@ +#include "mediapipe/modules/objectron/calculators/epnp.h" + +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/tool/test_util.h" + +namespace mediapipe { +namespace { + +using Eigen::AngleAxisf; +using Eigen::Map; +using Eigen::Matrix; +using Eigen::Matrix4f; +using Eigen::RowMajor; +using Eigen::Vector2f; +using Eigen::Vector3f; +using ::testing::HasSubstr; +using ::testing::Test; +using ::testing::status::StatusIs; +using Matrix3f = Eigen::Matrix; + +constexpr uint8_t kNumKeypoints = 9; + +// clang-format off +constexpr float kUnitBox[] = { 0.0f, 0.0f, 0.0f, + -0.5f, -0.5f, -0.5f, + -0.5f, -0.5f, 0.5f, + -0.5f, 0.5f, -0.5f, + -0.5f, 0.5f, 0.5f, + 0.5f, -0.5f, -0.5f, + 0.5f, -0.5f, 0.5f, + 0.5f, 0.5f, -0.5f, + 0.5f, 0.5f, 0.5f, }; +// clang-format on + +constexpr float kFocalX = 1.0f; +constexpr float kFocalY = 1.0f; +constexpr float kCenterX = 0.0f; +constexpr float kCenterY = 0.0f; + +constexpr float kAzimuth = 90.0f * M_PI / 180.0f; +constexpr float kElevation = 45.0f * M_PI / 180.0f; +constexpr float kTilt = 15.0f * M_PI / 180.0f; + +constexpr float kTranslationArray[] = {0.0f, 0.0f, -100.0f}; + +constexpr float kScaleArray[] = {50.0f, 50.0f, 50.0f}; + +class SolveEpnpTest : public Test { + protected: + SolveEpnpTest() {} + + void SetUp() override { + // Create vertices in world frame. + Map> vertices_w(kUnitBox); + + // Create Pose. + Matrix3f rotation; + rotation = AngleAxisf(kTilt, Vector3f::UnitZ()) * + AngleAxisf(kElevation, Vector3f::UnitX()) * + AngleAxisf(kAzimuth, Vector3f::UnitY()); + Map translation(kTranslationArray); + Map scale(kScaleArray); + + // Generate 3d vertices in camera frame. + const auto vertices_c = + ((rotation * scale.asDiagonal() * vertices_w.transpose()).colwise() + + translation) + .transpose(); + + // Generate input 2d points. 
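+    // Each camera-frame vertex is projected through a pinhole camera looking
+    // down the -z axis: x_ndc = -fx * x / z + cx, and the NDC range [-1, 1]
+    // is then mapped to [0, 1] pixel coordinates.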
+ std::vector input_2d_points; + std::vector expected_3d_points; + for (int i = 0; i < kNumKeypoints; ++i) { + const auto x = vertices_c(i, 0); + const auto y = vertices_c(i, 1); + const auto z = vertices_c(i, 2); + + const float x_ndc = -kFocalX * x / z + kCenterX; + const float y_ndc = -kFocalY * y / z + kCenterY; + + const float x_pixel = (1.0f + x_ndc) / 2.0f; + const float y_pixel = (1.0f - y_ndc) / 2.0f; + + expected_3d_points_.emplace_back(x, y, z); + input_2d_points_.emplace_back(x_pixel, y_pixel); + } + } + + void VerifyOutput3dPoints(const std::vector& output_3d_points) { + EXPECT_EQ(kNumKeypoints, output_3d_points.size()); + const float scale = output_3d_points[0].z() / expected_3d_points_[0].z(); + for (int i = 0; i < kNumKeypoints; ++i) { + EXPECT_NEAR(output_3d_points[i].x(), expected_3d_points_[i].x() * scale, + 2.e-6f); + EXPECT_NEAR(output_3d_points[i].y(), expected_3d_points_[i].y() * scale, + 2.e-6f); + EXPECT_NEAR(output_3d_points[i].z(), expected_3d_points_[i].z() * scale, + 2.e-6f); + } + } + + std::vector input_2d_points_; + std::vector expected_3d_points_; +}; + +TEST_F(SolveEpnpTest, SolveEpnp) { + std::vector output_3d_points; + MP_ASSERT_OK(SolveEpnp(kFocalX, kFocalY, kCenterX, kCenterY, + /*portrait*/ false, input_2d_points_, + &output_3d_points)); + // Test output 3D points. + VerifyOutput3dPoints(output_3d_points); +} + +TEST_F(SolveEpnpTest, SolveEpnppPortrait) { + std::vector output_3d_points; + MP_ASSERT_OK(SolveEpnp(kFocalX, kFocalY, kCenterX, kCenterY, + /*portrait*/ true, input_2d_points_, + &output_3d_points)); + // Test output 3D points. + for (auto& point_3d : output_3d_points) { + const auto x = point_3d.x(); + const auto y = point_3d.y(); + // Convert from portrait mode to normal mode, y => x, x => -y. + point_3d.x() = y; + point_3d.y() = -x; + } + VerifyOutput3dPoints(output_3d_points); +} + +TEST_F(SolveEpnpTest, SolveEpnpProjectionMatrix) { + Matrix4f projection_matrix; + // clang-format off + projection_matrix << kFocalX, 0.0f, kCenterX, 0.0f, + 0.0f, kFocalY, kCenterY, 0.0f, + 0.0f, 0.0f, -1.0f, 0.0f, + 0.0f, 0.0f, -1.0f, 0.0f; + // clang-format on + + std::vector output_3d_points; + MP_ASSERT_OK(SolveEpnp(projection_matrix, /*portrait*/ false, + input_2d_points_, &output_3d_points)); + + // Test output 3D points. + VerifyOutput3dPoints(output_3d_points); +} + +TEST_F(SolveEpnpTest, BadInput2dPoints) { + // Generate empty input 2D points. + std::vector input_2d_points; + std::vector output_3d_points; + EXPECT_THAT(SolveEpnp(kFocalX, kFocalY, kCenterX, kCenterY, + /*portrait*/ false, input_2d_points, &output_3d_points), + StatusIs(absl::StatusCode::kInvalidArgument, + HasSubstr("Input must has"))); +} + +TEST_F(SolveEpnpTest, BadOutput3dPoints) { + // Generate null output 3D points. + std::vector* output_3d_points = nullptr; + EXPECT_THAT(SolveEpnp(kFocalX, kFocalY, kCenterX, kCenterY, + /*portrait*/ false, input_2d_points_, output_3d_points), + StatusIs(absl::StatusCode::kInvalidArgument, + "Output pointer output_points_3d is Null.")); +} + +} // namespace +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/filter_detection_calculator.cc b/mediapipe/modules/objectron/calculators/filter_detection_calculator.cc new file mode 100644 index 0000000..0f29f9c --- /dev/null +++ b/mediapipe/modules/objectron/calculators/filter_detection_calculator.cc @@ -0,0 +1,262 @@ +// Copyright 2020 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include + +#include "absl/container/node_hash_set.h" +#include "absl/strings/str_split.h" +#include "absl/strings/string_view.h" +#include "absl/strings/strip.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/detection.pb.h" +#include "mediapipe/framework/formats/location_data.pb.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/map_util.h" +#include "mediapipe/framework/port/re2.h" +#include "mediapipe/framework/port/status.h" +#include "mediapipe/modules/objectron/calculators/filter_detection_calculator.pb.h" + +namespace mediapipe { + +namespace { + +constexpr char kDetectionTag[] = "DETECTION"; +constexpr char kDetectionsTag[] = "DETECTIONS"; +constexpr char kLabelsTag[] = "LABELS"; +constexpr char kLabelsCsvTag[] = "LABELS_CSV"; + +using mediapipe::RE2; +using Detections = std::vector; +using Strings = std::vector; + +} // namespace + +// Filters the entries in a Detection to only those with valid scores +// for the specified allowed labels. Allowed labels are provided as a +// vector in an optional input side packet. Allowed labels can +// contain simple strings or regular expressions. The valid score range +// can be set in the options.The allowed labels can be provided as +// vector (LABELS) or CSV std::string (LABELS_CSV) containing class +// names of allowed labels. Note: Providing an empty vector in the input side +// packet Packet causes this calculator to act as a sink if +// empty_allowed_labels_means_allow_everything is set to false (default value). +// To allow all labels, use the calculator with no input side packet stream, or +// set empty_allowed_labels_means_allow_everything to true. +// +// Example config: +// node { +// calculator: "FilterDetectionCalculator" +// input_stream: "DETECTIONS:detections" +// output_stream: "DETECTIONS:filtered_detections" +// input_side_packet: "LABELS:allowed_labels" +// options: { +// [mediapipe.FilterDetectionCalculatorOptions.ext]: { +// min_score: 0.5 +// } +// } +// } + +struct FirstGreaterComparator { + bool operator()(const std::pair& a, + const std::pair& b) const { + return a.first > b.first; + } +}; + +absl::Status SortLabelsByDecreasingScore(const Detection& detection, + Detection* sorted_detection) { + RET_CHECK(sorted_detection); + RET_CHECK_EQ(detection.score_size(), detection.label_size()); + if (!detection.label_id().empty()) { + RET_CHECK_EQ(detection.score_size(), detection.label_id_size()); + } + // Copies input to keep all fields unchanged, and to reserve space for + // repeated fields. Repeated fields (score, label, and label_id) will be + // overwritten. 
+  *sorted_detection = detection;
+
+  std::vector<std::pair<float, int>> scores_and_indices(
+      detection.score_size());
+  for (int i = 0; i < detection.score_size(); ++i) {
+    scores_and_indices[i].first = detection.score(i);
+    scores_and_indices[i].second = i;
+  }
+
+  std::sort(scores_and_indices.begin(), scores_and_indices.end(),
+            FirstGreaterComparator());
+
+  for (int i = 0; i < detection.score_size(); ++i) {
+    const int index = scores_and_indices[i].second;
+    sorted_detection->set_score(i, detection.score(index));
+    sorted_detection->set_label(i, detection.label(index));
+  }
+
+  if (!detection.label_id().empty()) {
+    for (int i = 0; i < detection.score_size(); ++i) {
+      const int index = scores_and_indices[i].second;
+      sorted_detection->set_label_id(i, detection.label_id(index));
+    }
+  }
+  return absl::OkStatus();
+}
+
+class FilterDetectionCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  bool IsValidLabel(const std::string& label);
+  bool IsValidScore(float score);
+  // Stores numeric limits for filtering on the score.
+  FilterDetectionCalculatorOptions options_;
+  // We use the next two fields to possibly filter to a limited set of
+  // classes. The hash_set will be empty in two cases: 1) if no input
+  // side packet stream is provided (not filtering on labels), or 2)
+  // if the input side packet contains an empty vector (no labels are
+  // allowed). We use limit_labels_ to distinguish between the two cases.
+  bool limit_labels_ = true;
+  absl::node_hash_set<std::string> allowed_labels_;
+};
+REGISTER_CALCULATOR(FilterDetectionCalculator);
+
+absl::Status FilterDetectionCalculator::GetContract(CalculatorContract* cc) {
+  RET_CHECK(!cc->Inputs().GetTags().empty());
+  RET_CHECK(!cc->Outputs().GetTags().empty());
+
+  if (cc->Inputs().HasTag(kDetectionTag)) {
+    cc->Inputs().Tag(kDetectionTag).Set<Detection>();
+    cc->Outputs().Tag(kDetectionTag).Set<Detection>();
+  }
+  if (cc->Inputs().HasTag(kDetectionsTag)) {
+    cc->Inputs().Tag(kDetectionsTag).Set<Detections>();
+    cc->Outputs().Tag(kDetectionsTag).Set<Detections>();
+  }
+  if (cc->InputSidePackets().HasTag(kLabelsTag)) {
+    cc->InputSidePackets().Tag(kLabelsTag).Set<Strings>();
+  }
+  if (cc->InputSidePackets().HasTag(kLabelsCsvTag)) {
+    cc->InputSidePackets().Tag(kLabelsCsvTag).Set<std::string>();
+  }
+  return absl::OkStatus();
+}
+
+absl::Status FilterDetectionCalculator::Open(CalculatorContext* cc) {
+  cc->SetOffset(TimestampDiff(0));
+  options_ = cc->Options<FilterDetectionCalculatorOptions>();
+  limit_labels_ = cc->InputSidePackets().HasTag(kLabelsTag) ||
+                  cc->InputSidePackets().HasTag(kLabelsCsvTag);
+  if (limit_labels_) {
+    Strings allowlist_labels;
+    if (cc->InputSidePackets().HasTag(kLabelsCsvTag)) {
+      allowlist_labels = absl::StrSplit(
+          cc->InputSidePackets().Tag(kLabelsCsvTag).Get<std::string>(), ',',
+          absl::SkipWhitespace());
+      for (auto& e : allowlist_labels) {
+        absl::StripAsciiWhitespace(&e);
+      }
+    } else {
+      allowlist_labels = cc->InputSidePackets().Tag(kLabelsTag).Get<Strings>();
+    }
+    allowed_labels_.insert(allowlist_labels.begin(), allowlist_labels.end());
+  }
+  if (limit_labels_ && allowed_labels_.empty()) {
+    if (options_.fail_on_empty_labels()) {
+      cc->GetCounter("VideosWithEmptyLabelsAllowlist")->Increment();
+      return tool::StatusFail(
+          "FilterDetectionCalculator received empty allowlist with "
+          "fail_on_empty_labels = true.");
+    }
+    if (options_.empty_allowed_labels_means_allow_everything()) {
+      // Continue as if side_input was not provided, i.e. pass all labels.
+      limit_labels_ = false;
+    }
+  }
+  return absl::OkStatus();
+}
+
+absl::Status FilterDetectionCalculator::Process(CalculatorContext* cc) {
+  if (limit_labels_ && allowed_labels_.empty()) {
+    return absl::OkStatus();
+  }
+  Detections detections;
+  if (cc->Inputs().HasTag(kDetectionsTag)) {
+    detections = cc->Inputs().Tag(kDetectionsTag).Get<Detections>();
+  } else if (cc->Inputs().HasTag(kDetectionTag)) {
+    detections.emplace_back(cc->Inputs().Tag(kDetectionTag).Get<Detection>());
+  }
+  std::unique_ptr<Detections> outputs(new Detections);
+  for (const auto& input : detections) {
+    Detection output;
+    for (int i = 0; i < input.label_size(); ++i) {
+      const std::string& label = input.label(i);
+      const float score = input.score(i);
+      if (IsValidLabel(label) && IsValidScore(score)) {
+        output.add_label(label);
+        output.add_score(score);
+      }
+    }
+    if (output.label_size() > 0) {
+      if (input.has_location_data()) {
+        *output.mutable_location_data() = input.location_data();
+      }
+      Detection output_sorted;
+      if (!SortLabelsByDecreasingScore(output, &output_sorted).ok()) {
+        // Uses the original output if sorting fails.
+        cc->GetCounter("FailedToSortLabelsInDetection")->Increment();
+        output_sorted = output;
+      }
+      outputs->emplace_back(output_sorted);
+    }
+  }
+
+  if (cc->Outputs().HasTag(kDetectionsTag)) {
+    cc->Outputs()
+        .Tag(kDetectionsTag)
+        .Add(outputs.release(), cc->InputTimestamp());
+  } else if (!outputs->empty()) {
+    cc->Outputs()
+        .Tag(kDetectionTag)
+        .Add(new Detection((*outputs)[0]), cc->InputTimestamp());
+  }
+  return absl::OkStatus();
+}
+
+bool FilterDetectionCalculator::IsValidLabel(const std::string& label) {
+  bool match = !limit_labels_ || allowed_labels_.contains(label);
+  if (!match) {
+    // If no exact match is found, check for regular expression
+    // comparisons in the allowed_labels.
+    for (const auto& label_regexp : allowed_labels_) {
+      match = match || RE2::FullMatch(label, RE2(label_regexp));
+    }
+  }
+  return match;
+}
+
+bool FilterDetectionCalculator::IsValidScore(float score) {
+  if (options_.has_min_score() && score < options_.min_score()) {
+    LOG(ERROR) << "Filter out detection with low score " << score;
+    return false;
+  }
+  if (options_.has_max_score() && score > options_.max_score()) {
+    LOG(ERROR) << "Filter out detection with high score " << score;
+    return false;
+  }
+  return true;
+}
+
+}  // namespace mediapipe
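For reference, the sorting step above reduces to ordering (score, index) pairs and rewriting the repeated fields through that permutation. A standalone sketch of the same idea on plain vectors; the names here are illustrative, not part of the calculator:

```cpp
#include <algorithm>
#include <cstdio>
#include <string>
#include <utility>
#include <vector>

int main() {
  // Parallel arrays, as in a Detection proto (label[i] is scored by score[i]).
  std::vector<std::string> labels = {"mug", "shoe", "chair"};
  std::vector<float> scores = {0.2f, 0.9f, 0.5f};

  // Pair each score with its index, then sort descending by score; this
  // plays the role of FirstGreaterComparator in the calculator above.
  std::vector<std::pair<float, int>> order;
  for (int i = 0; i < static_cast<int>(scores.size()); ++i) {
    order.emplace_back(scores[i], i);
  }
  std::sort(order.begin(), order.end(),
            [](const auto& a, const auto& b) { return a.first > b.first; });

  // Rewrite both repeated fields through the permutation, the same way
  // SortLabelsByDecreasingScore() overwrites its copied Detection.
  for (const auto& [score, index] : order) {
    std::printf("%s: %.1f\n", labels[index].c_str(), score);
  }
  // Prints: shoe: 0.9, chair: 0.5, mug: 0.2
}
```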
diff --git a/mediapipe/modules/objectron/calculators/filter_detection_calculator.proto b/mediapipe/modules/objectron/calculators/filter_detection_calculator.proto
new file mode 100644
index 0000000..ea79b8d
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/filter_detection_calculator.proto
@@ -0,0 +1,45 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+
+message FilterDetectionCalculatorOptions {
+  extend CalculatorOptions {
+    optional FilterDetectionCalculatorOptions ext = 339582987;
+  }
+  optional float min_score = 1;
+  optional float max_score = 2;
+  // Setting fail_on_empty_labels to true will cause the calculator to return a
+  // failure status on Open() if an empty list is provided on the external
+  // input, immediately terminating the graph run.
+  optional bool fail_on_empty_labels = 3 [default = false];
+  // If fail_on_empty_labels is set to false, setting
+  // empty_allowed_labels_means_allow_everything to false will cause the
+  // calculator to close its output stream and ignore the remaining inputs if
+  // an empty list is provided. If empty_allowed_labels_means_allow_everything
+  // is set to true, the calculator will pass all labels through.
+  optional bool empty_allowed_labels_means_allow_everything = 6
+      [default = false];
+  // Determines whether the input format is a vector<Detection> (use-case
+  // object detectors) or Detection (use-case classifiers).
+  optional bool use_detection_vector = 4 [deprecated = true];
+  // Determines whether the input side packet format is a vector of labels, or
+  // a string with comma separated labels.
+  optional bool use_allowed_labels_csv = 5 [deprecated = true];
+}
diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_to_rect_calculator.cc b/mediapipe/modules/objectron/calculators/frame_annotation_to_rect_calculator.cc
new file mode 100644
index 0000000..476f8cb
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/frame_annotation_to_rect_calculator.cc
@@ -0,0 +1,177 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <cmath>
+#include <vector>
+
+#include "Eigen/Dense"
+#include "absl/memory/memory.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/rect.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h"
+#include "mediapipe/modules/objectron/calculators/frame_annotation_to_rect_calculator.pb.h"
+
+namespace mediapipe {
+
+using Matrix3fRM = Eigen::Matrix<float, 3, 3, Eigen::RowMajor>;
+using Eigen::Vector2f;
+using Eigen::Vector3f;
+
+namespace {
+
+constexpr char kInputFrameAnnotationTag[] = "FRAME_ANNOTATION";
+constexpr char kOutputNormRectsTag[] = "NORM_RECTS";
+
+}  // namespace
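Before the calculator itself, it may help to see its two pieces of angle logic in isolation: the pose-to-angle computation maps one object axis to two opposite points, forms a screen-space direction from them, and returns the rotation that would make it vertical; and a pair of thresholds (40/41 degrees by default, per the options proto further below) gives the top-view switch its hysteresis. A sketch under the assumption that the pose is expressed in camera coordinates; `RotationAngleFromPoseSketch` and all values are illustrative:

```cpp
#include <cmath>
#include <cstdio>

#include "Eigen/Dense"

// Illustrative re-implementation, not part of the patch. Two opposite points
// on the chosen object axis are combined, each weighted by its own depth,
// into a screen-space direction; the returned angle is the rotation that
// would make that direction vertical.
float RotationAngleFromPoseSketch(const Eigen::Matrix3f& rotation,
                                  const Eigen::Vector3f& translation,
                                  const Eigen::Vector3f& axis) {
  const Eigen::Vector3f p1 = rotation * axis + translation;
  const Eigen::Vector3f p2 = -rotation * axis + translation;
  const float dy = p2.z() * p2.y() - p1.z() * p1.y();
  const float dx = p2.z() * p2.x() - p1.z() * p1.x();
  return static_cast<float>(M_PI) / 2.0f - std::atan2(dy, dx);
}

int main() {
  const float angle = RotationAngleFromPoseSketch(
      Eigen::Matrix3f::Identity(), Eigen::Vector3f(0.0f, 0.0f, -1.0f),
      Eigen::Vector3f::UnitY());

  // Hysteresis for the top-view switch: once TOP_VIEW_ON, the larger
  // on_threshold applies, so the state cannot flicker near the boundary.
  bool top_view_on = false;
  const float off_threshold = 40.0f;  // proto defaults, see further below
  const float on_threshold = 41.0f;
  const float view_to_z_angle = 39.0f;  // degrees, from acos(R(2, 1))
  const float thresh = top_view_on ? on_threshold : off_threshold;
  top_view_on = view_to_z_angle < thresh;

  std::printf("angle=%f top_view=%d\n", angle, top_view_on ? 1 : 0);
}
```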
+// A calculator that converts FrameAnnotation proto to NormalizedRect.
+// The rotation angle of the NormalizedRect is derived from the object's 3d
+// pose. The angle is calculated such that after rotation the 2d projection
+// of the y-axis on the image plane is always vertical.
+class FrameAnnotationToRectCalculator : public CalculatorBase {
+ public:
+  enum ViewStatus {
+    TOP_VIEW_ON,
+    TOP_VIEW_OFF,
+  };
+
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  void AddAnnotationToRect(const ObjectAnnotation& annotation,
+                           std::vector<NormalizedRect>* rects);
+  float RotationAngleFromAnnotation(const ObjectAnnotation& annotation);
+
+  float RotationAngleFromPose(const Matrix3fRM& rotation,
+                              const Vector3f& translation,
+                              const Vector3f& vec);
+  ViewStatus status_;
+  float off_threshold_;
+  float on_threshold_;
+};
+REGISTER_CALCULATOR(FrameAnnotationToRectCalculator);
+
+absl::Status FrameAnnotationToRectCalculator::GetContract(
+    CalculatorContract* cc) {
+  RET_CHECK(!cc->Inputs().GetTags().empty());
+  RET_CHECK(!cc->Outputs().GetTags().empty());
+
+  if (cc->Inputs().HasTag(kInputFrameAnnotationTag)) {
+    cc->Inputs().Tag(kInputFrameAnnotationTag).Set<FrameAnnotation>();
+  }
+
+  if (cc->Outputs().HasTag(kOutputNormRectsTag)) {
+    cc->Outputs().Tag(kOutputNormRectsTag).Set<std::vector<NormalizedRect>>();
+  }
+  return absl::OkStatus();
+}
+
+absl::Status FrameAnnotationToRectCalculator::Open(CalculatorContext* cc) {
+  cc->SetOffset(TimestampDiff(0));
+  status_ = TOP_VIEW_OFF;
+  const auto& options = cc->Options<FrameAnnotationToRectCalculatorOptions>();
+  off_threshold_ = options.off_threshold();
+  on_threshold_ = options.on_threshold();
+  RET_CHECK(off_threshold_ <= on_threshold_);
+  return absl::OkStatus();
+}
+
+absl::Status FrameAnnotationToRectCalculator::Process(CalculatorContext* cc) {
+  if (cc->Inputs().Tag(kInputFrameAnnotationTag).IsEmpty()) {
+    return absl::OkStatus();
+  }
+  auto output_rects = absl::make_unique<std::vector<NormalizedRect>>();
+  const auto& frame_annotation =
+      cc->Inputs().Tag(kInputFrameAnnotationTag).Get<FrameAnnotation>();
+  for (const auto& object_annotation : frame_annotation.annotations()) {
+    AddAnnotationToRect(object_annotation, output_rects.get());
+  }
+
+  // Output.
+  cc->Outputs()
+      .Tag(kOutputNormRectsTag)
+      .Add(output_rects.release(), cc->InputTimestamp());
+  return absl::OkStatus();
+}
+
+void FrameAnnotationToRectCalculator::AddAnnotationToRect(
+    const ObjectAnnotation& annotation, std::vector<NormalizedRect>* rects) {
+  float x_min = std::numeric_limits<float>::max();
+  float x_max = std::numeric_limits<float>::min();
+  float y_min = std::numeric_limits<float>::max();
+  float y_max = std::numeric_limits<float>::min();
+  for (const auto& keypoint : annotation.keypoints()) {
+    const auto& point_2d = keypoint.point_2d();
+    x_min = std::min(x_min, point_2d.x());
+    x_max = std::max(x_max, point_2d.x());
+    y_min = std::min(y_min, point_2d.y());
+    y_max = std::max(y_max, point_2d.y());
+  }
+  NormalizedRect new_rect;
+  new_rect.set_x_center((x_min + x_max) / 2);
+  new_rect.set_y_center((y_min + y_max) / 2);
+  new_rect.set_width(x_max - x_min);
+  new_rect.set_height(y_max - y_min);
+  new_rect.set_rotation(RotationAngleFromAnnotation(annotation));
+  rects->push_back(new_rect);
+}
+
+float FrameAnnotationToRectCalculator::RotationAngleFromAnnotation(
+    const ObjectAnnotation& annotation) {
+  // Get box rotation and translation from annotation.
+  const auto box_rotation =
+      Eigen::Map<const Matrix3fRM>(annotation.rotation().data());
+  const auto box_translation =
+      Eigen::Map<const Vector3f>(annotation.translation().data());
+
+  // Rotation angle to use when top-view is on (top-view on),
+  // which will make the z-axis upright after the rotation.
+ const float angle_on = + RotationAngleFromPose(box_rotation, box_translation, Vector3f::UnitZ()); + // Rotation angle to use when side-view is on(top-view off), + // Which will make y-axis upright after the rotation. + const float angle_off = + RotationAngleFromPose(box_rotation, box_translation, Vector3f::UnitY()); + + // Calculate angle between z-axis and viewing ray in degrees. + const float view_to_z_angle = std::acos(box_rotation(2, 1)) * 180 / M_PI; + + // Determine threshold based on current status, + // on_threshold_ is used for TOP_VIEW_ON -> TOP_VIEW_OFF transition, + // off_threshold_ is used for TOP_VIEW_OFF -> TOP_VIEW_ON transition. + const float thresh = + (status_ == TOP_VIEW_ON) ? on_threshold_ : off_threshold_; + + // If view_to_z_angle is smaller than threshold, then top-view is on; + // Otherwise top-view is off. + status_ = (view_to_z_angle < thresh) ? TOP_VIEW_ON : TOP_VIEW_OFF; + + // Determine which angle to used based on current status_. + float angle_to_rotate = (status_ == TOP_VIEW_ON) ? angle_on : angle_off; + return angle_to_rotate; +} + +float FrameAnnotationToRectCalculator::RotationAngleFromPose( + const Matrix3fRM& rotation, const Vector3f& translation, + const Vector3f& vec) { + auto p1 = rotation * vec + translation; + auto p2 = -rotation * vec + translation; + const float dy = p2[2] * p2[1] - p1[2] * p1[1]; + const float dx = p2[2] * p2[0] - p1[2] * p1[0]; + return M_PI / 2 - std::atan2(dy, dx); +} + +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_to_rect_calculator.proto b/mediapipe/modules/objectron/calculators/frame_annotation_to_rect_calculator.proto new file mode 100644 index 0000000..8959cb8 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/frame_annotation_to_rect_calculator.proto @@ -0,0 +1,31 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +import "mediapipe/framework/calculator.proto"; + +message FrameAnnotationToRectCalculatorOptions { + extend CalculatorOptions { + optional FrameAnnotationToRectCalculatorOptions ext = 338119067; + } + + // The threshold to use when top-view is off,to enable hysteresis, + // It's required that off_threshold <= on_threshold. + optional float off_threshold = 1 [default = 40.0]; + // The threshold to use when top-view is on. + optional float on_threshold = 2 [default = 41.0]; +} diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_to_timed_box_list_calculator.cc b/mediapipe/modules/objectron/calculators/frame_annotation_to_timed_box_list_calculator.cc new file mode 100644 index 0000000..7467880 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/frame_annotation_to_timed_box_list_calculator.cc @@ -0,0 +1,115 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include + +#include "absl/memory/memory.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/port/opencv_core_inc.h" +#include "mediapipe/framework/port/opencv_imgproc_inc.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status.h" +#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h" +#include "mediapipe/modules/objectron/calculators/box_util.h" +#include "mediapipe/util/tracking/box_tracker.pb.h" + +namespace { +constexpr char kInputStreamTag[] = "FRAME_ANNOTATION"; +constexpr char kOutputStreamTag[] = "BOXES"; +} // namespace + +namespace mediapipe { + +// Convert FrameAnnotation 3d bounding box detections to TimedBoxListProto +// 2d bounding boxes. +// +// Input: +// FRAME_ANNOTATION - 3d bounding box annotation. +// Output: +// BOXES - 2d bounding box enclosing the projection of 3d box. +// +// Usage example: +// node { +// calculator: "FrameAnnotationToTimedBoxListCalculator" +// input_stream: "FRAME_ANNOTATION:frame_annotation" +// output_stream: "BOXES:boxes" +// } +class FrameAnnotationToTimedBoxListCalculator : public CalculatorBase { + public: + static absl::Status GetContract(CalculatorContract* cc); + + absl::Status Open(CalculatorContext* cc) override; + absl::Status Process(CalculatorContext* cc) override; + absl::Status Close(CalculatorContext* cc) override; +}; +REGISTER_CALCULATOR(FrameAnnotationToTimedBoxListCalculator); + +absl::Status FrameAnnotationToTimedBoxListCalculator::GetContract( + CalculatorContract* cc) { + RET_CHECK(!cc->Inputs().GetTags().empty()); + RET_CHECK(!cc->Outputs().GetTags().empty()); + + if (cc->Inputs().HasTag(kInputStreamTag)) { + cc->Inputs().Tag(kInputStreamTag).Set(); + } + + if (cc->Outputs().HasTag(kOutputStreamTag)) { + cc->Outputs().Tag(kOutputStreamTag).Set(); + } + return absl::OkStatus(); +} + +absl::Status FrameAnnotationToTimedBoxListCalculator::Open( + CalculatorContext* cc) { + return absl::OkStatus(); +} + +absl::Status FrameAnnotationToTimedBoxListCalculator::Process( + CalculatorContext* cc) { + if (cc->Inputs().HasTag(kInputStreamTag) && + !cc->Inputs().Tag(kInputStreamTag).IsEmpty()) { + const auto& frame_annotation = + cc->Inputs().Tag(kInputStreamTag).Get(); + auto output_objects = absl::make_unique(); + for (const auto& annotation : frame_annotation.annotations()) { + std::vector key_points; + for (const auto& keypoint : annotation.keypoints()) { + key_points.push_back( + cv::Point2f(keypoint.point_2d().x(), keypoint.point_2d().y())); + } + TimedBoxProto* added_box = output_objects->add_box(); + ComputeBoundingRect(key_points, added_box); + added_box->set_id(annotation.object_id()); + const int64 time_msec = + static_cast(std::round(frame_annotation.timestamp() / 1000)); + added_box->set_time_msec(time_msec); + } + + // Output + if (cc->Outputs().HasTag(kOutputStreamTag)) { + cc->Outputs() + .Tag(kOutputStreamTag) + .Add(output_objects.release(), cc->InputTimestamp()); + } + } + + return absl::OkStatus(); +} + +absl::Status FrameAnnotationToTimedBoxListCalculator::Close( + CalculatorContext* cc) { + return 
absl::OkStatus(); +} + +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc b/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc new file mode 100644 index 0000000..eebf885 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc @@ -0,0 +1,102 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/modules/objectron/calculators/frame_annotation_tracker.h" + +#include "absl/container/flat_hash_set.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h" +#include "mediapipe/modules/objectron/calculators/box_util.h" +#include "mediapipe/util/tracking/box_tracker.pb.h" + +namespace mediapipe { + +void FrameAnnotationTracker::AddDetectionResult( + const FrameAnnotation& frame_annotation) { + const int64 time_us = + static_cast(std::round(frame_annotation.timestamp())); + for (const auto& object_annotation : frame_annotation.annotations()) { + detected_objects_[time_us + object_annotation.object_id()] = + object_annotation; + } +} + +FrameAnnotation FrameAnnotationTracker::ConsolidateTrackingResult( + const TimedBoxProtoList& tracked_boxes, + absl::flat_hash_set* cancel_object_ids) { + CHECK(cancel_object_ids != nullptr); + FrameAnnotation frame_annotation; + std::vector keys_to_be_deleted; + for (const auto& detected_obj : detected_objects_) { + const int object_id = detected_obj.second.object_id(); + if (cancel_object_ids->contains(object_id)) { + // Remember duplicated detections' keys. + keys_to_be_deleted.push_back(detected_obj.first); + continue; + } + TimedBoxProto ref_box; + for (const auto& box : tracked_boxes.box()) { + if (box.id() == object_id) { + ref_box = box; + break; + } + } + if (!ref_box.has_id() || ref_box.id() < 0) { + LOG(ERROR) << "Can't find matching tracked box for object id: " + << object_id << ". Likely lost tracking of it."; + keys_to_be_deleted.push_back(detected_obj.first); + continue; + } + + // Find duplicated boxes + for (const auto& box : tracked_boxes.box()) { + if (box.id() != object_id) { + if (ComputeBoxIoU(ref_box, box) > iou_threshold_) { + cancel_object_ids->insert(box.id()); + } + } + } + + // Map ObjectAnnotation from detection to tracked time. + // First, gather all keypoints from source detection. + std::vector key_points; + for (const auto& keypoint : detected_obj.second.keypoints()) { + key_points.push_back( + cv::Point2f(keypoint.point_2d().x(), keypoint.point_2d().y())); + } + // Second, find source box. + TimedBoxProto src_box; + ComputeBoundingRect(key_points, &src_box); + ObjectAnnotation* tracked_obj = frame_annotation.add_annotations(); + tracked_obj->set_object_id(ref_box.id()); + // Finally, map all keypoints in the source detection to tracked location. 
+    for (const auto& keypoint : detected_obj.second.keypoints()) {
+      cv::Point2f dst = MapPoint(
+          src_box, ref_box,
+          cv::Point2f(keypoint.point_2d().x(), keypoint.point_2d().y()),
+          img_width_, img_height_);
+      auto* dst_point = tracked_obj->add_keypoints()->mutable_point_2d();
+      dst_point->set_x(dst.x);
+      dst_point->set_y(dst.y);
+    }
+  }
+
+  for (const auto& key : keys_to_be_deleted) {
+    detected_objects_.erase(key);
+  }
+
+  return frame_annotation;
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_tracker.h b/mediapipe/modules/objectron/calculators/frame_annotation_tracker.h
new file mode 100644
index 0000000..11a469c
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/frame_annotation_tracker.h
@@ -0,0 +1,62 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_FRAME_ANNOTATION_TRACKER_H_
+#define MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_FRAME_ANNOTATION_TRACKER_H_
+
+#include <functional>
+
+#include "absl/container/btree_map.h"
+#include "absl/container/flat_hash_set.h"
+#include "mediapipe/framework/port/integral_types.h"
+#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h"
+#include "mediapipe/util/tracking/box_tracker.pb.h"
+
+namespace mediapipe {
+
+class FrameAnnotationTracker {
+ public:
+  // If two bounding boxes have IoU over iou_threshold, then we consider them
+  // describing the same object.
+  FrameAnnotationTracker(float iou_threshold, float img_width,
+                         float img_height)
+      : iou_threshold_(iou_threshold),
+        img_width_(img_width),
+        img_height_(img_height) {}
+
+  // Adds detection results from an external detector.
+  void AddDetectionResult(const FrameAnnotation& frame_annotation);
+
+  // Consolidates tracking result from an external tracker, associates with
+  // the detection result by the object id, and produces the corresponding
+  // result in FrameAnnotation. When there are duplicates, output the ids that
+  // need to be cancelled in cancel_object_ids.
+  // Note that the returned FrameAnnotation is missing timestamp. Need to fill
+  // that field.
+  FrameAnnotation ConsolidateTrackingResult(
+      const TimedBoxProtoList& tracked_boxes,
+      absl::flat_hash_set<int>* cancel_object_ids);
+
+ private:
+  float iou_threshold_;
+  float img_width_;
+  float img_height_;
+  // Cached detection results over time.
+  // Key is timestamp_us + object_id.
+  absl::btree_map<int64, ObjectAnnotation, std::greater<int64>>
+      detected_objects_;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_FRAME_ANNOTATION_TRACKER_H_
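ConsolidateTrackingResult relies on MapPoint and ComputeBoundingRect from box_util.h, which is not part of this diff. As a rough illustration of what the remapping does in the axis-aligned case (ignoring any rotation handling the real helper may have), a keypoint keeps its relative position inside the box while being carried from the detection-time box to the tracked box; `Box`, `Lerp`, and `MapPointSketch` below are hypothetical:

```cpp
#include <cstdio>

// TimedBoxProto-like axis-aligned box in normalized image coordinates.
struct Box {
  float top, bottom, left, right;
};

float Lerp(float a, float b, float t) { return a + (b - a) * t; }

// Hypothetical sketch: preserve the point's relative (u, v) position in the
// source box when placing it into the destination box.
void MapPointSketch(const Box& src, const Box& dst, float x, float y,
                    float* out_x, float* out_y) {
  const float u = (x - src.left) / (src.right - src.left);
  const float v = (y - src.top) / (src.bottom - src.top);
  *out_x = Lerp(dst.left, dst.right, u);
  *out_y = Lerp(dst.top, dst.bottom, v);
}

int main() {
  // These values mirror the unit test further below: the detection-time box
  // spans x:[0.2, 0.5], y:[0.1, 0.4] and the tracker later reports it at
  // x:[0.6, 0.9], y:[0.4, 0.7], i.e. shifted by (0.4, 0.3).
  const Box src = {0.1f, 0.4f, 0.2f, 0.5f};
  const Box dst = {0.4f, 0.7f, 0.6f, 0.9f};
  float x = 0.0f, y = 0.0f;
  MapPointSketch(src, dst, 0.35f, 0.25f, &x, &y);
  std::printf("(%.2f, %.2f)\n", x, y);  // (0.75, 0.55)
}
```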
diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_tracker_calculator.cc b/mediapipe/modules/objectron/calculators/frame_annotation_tracker_calculator.cc
new file mode 100644
index 0000000..9079b9a
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/frame_annotation_tracker_calculator.cc
@@ -0,0 +1,134 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/container/flat_hash_set.h"
+#include "absl/memory/memory.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h"
+#include "mediapipe/modules/objectron/calculators/frame_annotation_tracker.h"
+#include "mediapipe/modules/objectron/calculators/frame_annotation_tracker_calculator.pb.h"
+#include "mediapipe/util/tracking/box_tracker.pb.h"
+
+namespace {
+constexpr char kInputFrameAnnotationTag[] = "FRAME_ANNOTATION";
+constexpr char kInputTrackedBoxesTag[] = "TRACKED_BOXES";
+constexpr char kOutputTrackedFrameAnnotationTag[] = "TRACKED_FRAME_ANNOTATION";
+constexpr char kOutputCancelObjectIdTag[] = "CANCEL_OBJECT_ID";
+}  // namespace
+
+namespace mediapipe {
+
+// Tracks frame annotations seeded/updated by the FRAME_ANNOTATION
+// input_stream. When using this calculator, make sure FRAME_ANNOTATION and
+// TRACKED_BOXES are in different sync sets.
+//
+// Input:
+//  FRAME_ANNOTATION - frame annotation.
+//  TRACKED_BOXES - 2d box tracking result.
+// Output:
+//  TRACKED_FRAME_ANNOTATION - annotation inferred from the 2d tracking result.
+//  CANCEL_OBJECT_ID - object id that needs to be cancelled from the tracker.
+// +// Usage example: +// node { +// calculator: "FrameAnnotationTrackerCalculator" +// input_stream: "FRAME_ANNOTATION:frame_annotation" +// input_stream: "TRACKED_BOXES:tracked_boxes" +// output_stream: "TRACKED_FRAME_ANNOTATION:tracked_frame_annotation" +// output_stream: "CANCEL_OBJECT_ID:cancel_object_id" +// } +class FrameAnnotationTrackerCalculator : public CalculatorBase { + public: + static absl::Status GetContract(CalculatorContract* cc); + + absl::Status Open(CalculatorContext* cc) override; + absl::Status Process(CalculatorContext* cc) override; + absl::Status Close(CalculatorContext* cc) override; + + private: + std::unique_ptr frame_annotation_tracker_; +}; +REGISTER_CALCULATOR(FrameAnnotationTrackerCalculator); + +absl::Status FrameAnnotationTrackerCalculator::GetContract( + CalculatorContract* cc) { + RET_CHECK(!cc->Inputs().GetTags().empty()); + RET_CHECK(!cc->Outputs().GetTags().empty()); + + if (cc->Inputs().HasTag(kInputFrameAnnotationTag)) { + cc->Inputs().Tag(kInputFrameAnnotationTag).Set(); + } + if (cc->Inputs().HasTag(kInputTrackedBoxesTag)) { + cc->Inputs().Tag(kInputTrackedBoxesTag).Set(); + } + if (cc->Outputs().HasTag(kOutputTrackedFrameAnnotationTag)) { + cc->Outputs().Tag(kOutputTrackedFrameAnnotationTag).Set(); + } + if (cc->Outputs().HasTag(kOutputCancelObjectIdTag)) { + cc->Outputs().Tag(kOutputCancelObjectIdTag).Set(); + } + return absl::OkStatus(); +} + +absl::Status FrameAnnotationTrackerCalculator::Open(CalculatorContext* cc) { + const auto& options = cc->Options(); + frame_annotation_tracker_ = absl::make_unique( + options.iou_threshold(), options.img_width(), options.img_height()); + return absl::OkStatus(); +} + +absl::Status FrameAnnotationTrackerCalculator::Process(CalculatorContext* cc) { + if (cc->Inputs().HasTag(kInputFrameAnnotationTag) && + !cc->Inputs().Tag(kInputFrameAnnotationTag).IsEmpty()) { + frame_annotation_tracker_->AddDetectionResult( + cc->Inputs().Tag(kInputFrameAnnotationTag).Get()); + } + if (cc->Inputs().HasTag(kInputTrackedBoxesTag) && + !cc->Inputs().Tag(kInputTrackedBoxesTag).IsEmpty() && + cc->Outputs().HasTag(kOutputTrackedFrameAnnotationTag)) { + absl::flat_hash_set cancel_object_ids; + auto output_frame_annotation = absl::make_unique(); + *output_frame_annotation = + frame_annotation_tracker_->ConsolidateTrackingResult( + cc->Inputs().Tag(kInputTrackedBoxesTag).Get(), + &cancel_object_ids); + output_frame_annotation->set_timestamp(cc->InputTimestamp().Microseconds()); + + cc->Outputs() + .Tag(kOutputTrackedFrameAnnotationTag) + .Add(output_frame_annotation.release(), cc->InputTimestamp()); + + if (cc->Outputs().HasTag(kOutputCancelObjectIdTag)) { + auto packet_timestamp = cc->InputTimestamp(); + for (const auto& id : cancel_object_ids) { + // The timestamp is incremented (by 1 us) because currently the box + // tracker calculator only accepts one cancel object ID for any given + // timestamp. 
+ cc->Outputs() + .Tag(kOutputCancelObjectIdTag) + .AddPacket(mediapipe::MakePacket(id).At(packet_timestamp++)); + } + } + } + + return absl::OkStatus(); +} + +absl::Status FrameAnnotationTrackerCalculator::Close(CalculatorContext* cc) { + return absl::OkStatus(); +} + +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_tracker_calculator.proto b/mediapipe/modules/objectron/calculators/frame_annotation_tracker_calculator.proto new file mode 100644 index 0000000..f37308a --- /dev/null +++ b/mediapipe/modules/objectron/calculators/frame_annotation_tracker_calculator.proto @@ -0,0 +1,36 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// The option proto for the FrameAnnotationTrackerCalculatorOptions. + +syntax = "proto2"; + +package mediapipe; + +import "mediapipe/framework/calculator.proto"; + +message FrameAnnotationTrackerCalculatorOptions { + extend CalculatorOptions { + optional FrameAnnotationTrackerCalculatorOptions ext = 291291253; + } + + // The threshold on intersection-over-union (IoU). We consider + // boxes with IoU larger than this threshold to be the duplicates. + optional float iou_threshold = 1 [default = 0.5]; + + // We need image dimension to properly compute annotation locations. + optional float img_width = 2; + + optional float img_height = 3; +} diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_tracker_test.cc b/mediapipe/modules/objectron/calculators/frame_annotation_tracker_test.cc new file mode 100644 index 0000000..d155f8e --- /dev/null +++ b/mediapipe/modules/objectron/calculators/frame_annotation_tracker_test.cc @@ -0,0 +1,143 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/modules/objectron/calculators/frame_annotation_tracker.h" + +#include "absl/container/flat_hash_set.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h" +#include "mediapipe/util/tracking/box_tracker.pb.h" + +namespace mediapipe { +namespace { + +// Create a new object annotation by shifting a reference +// object annotation. 
+ObjectAnnotation ShiftObject2d(const ObjectAnnotation& ref_obj, float dx, + float dy) { + ObjectAnnotation obj = ref_obj; + for (auto& keypoint : *(obj.mutable_keypoints())) { + const float ref_x = keypoint.point_2d().x(); + const float ref_y = keypoint.point_2d().y(); + keypoint.mutable_point_2d()->set_x(ref_x + dx); + keypoint.mutable_point_2d()->set_y(ref_y + dy); + } + return obj; +} + +TimedBoxProto ShiftBox(const TimedBoxProto& ref_box, float dx, float dy) { + TimedBoxProto box = ref_box; + box.set_top(ref_box.top() + dy); + box.set_bottom(ref_box.bottom() + dy); + box.set_left(ref_box.left() + dx); + box.set_right(ref_box.right() + dx); + return box; +} + +// Constructs a fixed ObjectAnnotation. +ObjectAnnotation ConstructFixedObject( + const std::vector>& points) { + ObjectAnnotation obj; + for (const auto& point : points) { + auto* keypoint = obj.add_keypoints(); + CHECK_EQ(2, point.size()); + keypoint->mutable_point_2d()->set_x(point[0]); + keypoint->mutable_point_2d()->set_y(point[1]); + } + return obj; +} + +TEST(FrameAnnotationTrackerTest, TestConsolidation) { + // Add 4 detections represented by FrameAnnotation, of which 3 correspond + // to the same object. + ObjectAnnotation object1, object2, object3, object4; + // The bounding rectangle for these object keypoints is: + // x: [0.2, 0.5], y: [0.1, 0.4] + object3 = ConstructFixedObject({{0.35f, 0.25f}, + {0.3f, 0.3f}, + {0.2f, 0.4f}, + {0.3f, 0.1f}, + {0.2f, 0.2f}, + {0.5f, 0.3f}, + {0.4f, 0.4f}, + {0.5f, 0.1f}, + {0.4f, 0.2f}}); + object3.set_object_id(3); + object1 = ShiftObject2d(object3, -0.05f, -0.05f); + object1.set_object_id(1); + object2 = ShiftObject2d(object3, 0.05f, 0.05f); + object2.set_object_id(2); + object4 = ShiftObject2d(object3, 0.2f, 0.2f); + object4.set_object_id(4); + FrameAnnotation frame_annotation_1; + frame_annotation_1.set_timestamp(30 * 1000); // 30ms + *(frame_annotation_1.add_annotations()) = object1; + *(frame_annotation_1.add_annotations()) = object4; + FrameAnnotation frame_annotation_2; + frame_annotation_2.set_timestamp(60 * 1000); // 60ms + *(frame_annotation_2.add_annotations()) = object2; + FrameAnnotation frame_annotation_3; + frame_annotation_3.set_timestamp(90 * 1000); // 90ms + *(frame_annotation_3.add_annotations()) = object3; + + FrameAnnotationTracker frame_annotation_tracker(/*iou_threshold*/ 0.5f, 1.0f, + 1.0f); + frame_annotation_tracker.AddDetectionResult(frame_annotation_1); + frame_annotation_tracker.AddDetectionResult(frame_annotation_2); + frame_annotation_tracker.AddDetectionResult(frame_annotation_3); + + TimedBoxProtoList timed_box_proto_list; + TimedBoxProto* timed_box_proto = timed_box_proto_list.add_box(); + timed_box_proto->set_top(0.4f); + timed_box_proto->set_bottom(0.7f); + timed_box_proto->set_left(0.6f); + timed_box_proto->set_right(0.9f); + timed_box_proto->set_id(3); + timed_box_proto->set_time_msec(150); + timed_box_proto = timed_box_proto_list.add_box(); + *timed_box_proto = ShiftBox(timed_box_proto_list.box(0), 0.01f, 0.01f); + timed_box_proto->set_id(1); + timed_box_proto->set_time_msec(150); + timed_box_proto = timed_box_proto_list.add_box(); + *timed_box_proto = ShiftBox(timed_box_proto_list.box(0), -0.01f, -0.01f); + timed_box_proto->set_id(2); + timed_box_proto->set_time_msec(150); + absl::flat_hash_set cancel_object_ids; + FrameAnnotation tracked_detection = + frame_annotation_tracker.ConsolidateTrackingResult(timed_box_proto_list, + &cancel_object_ids); + EXPECT_EQ(2, cancel_object_ids.size()); + EXPECT_EQ(1, cancel_object_ids.count(1)); + 
EXPECT_EQ(1, cancel_object_ids.count(2));
+  EXPECT_EQ(1, tracked_detection.annotations_size());
+  EXPECT_EQ(3, tracked_detection.annotations(0).object_id());
+  EXPECT_EQ(object3.keypoints_size(),
+            tracked_detection.annotations(0).keypoints_size());
+  const float x_offset = 0.4f;
+  const float y_offset = 0.3f;
+  const float tolerance = 1e-5f;
+  for (int i = 0; i < object3.keypoints_size(); ++i) {
+    const auto& point_2d =
+        tracked_detection.annotations(0).keypoints(i).point_2d();
+    EXPECT_NEAR(point_2d.x(), object3.keypoints(i).point_2d().x() + x_offset,
+                tolerance);
+    EXPECT_NEAR(point_2d.y(), object3.keypoints(i).point_2d().y() + y_offset,
+                tolerance);
+  }
+}
+
+}  // namespace
+}  // namespace mediapipe
diff --git a/mediapipe/modules/objectron/calculators/landmarks_to_frame_annotation_calculator.cc b/mediapipe/modules/objectron/calculators/landmarks_to_frame_annotation_calculator.cc
new file mode 100644
index 0000000..60c4876
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/landmarks_to_frame_annotation_calculator.cc
@@ -0,0 +1,112 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/memory/memory.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/landmark.pb.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status.h"
+#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h"
+
+namespace mediapipe {
+
+namespace {
+
+constexpr char kInputLandmarksTag[] = "LANDMARKS";
+constexpr char kInputMultiLandmarksTag[] = "MULTI_LANDMARKS";
+constexpr char kOutputFrameAnnotationTag[] = "FRAME_ANNOTATION";
+
+}  // namespace
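One detail worth noting about the converter below: only x and y of each landmark are copied into the FrameAnnotation; any z estimated by the landmark model is discarded, and metric depth is recovered later by the EPnP lifting stage. A toy sketch with plain structs standing in for the protos:

```cpp
#include <cstdio>
#include <vector>

// Plain stand-ins for the protos, only to make the mapping visible; the real
// calculator operates on NormalizedLandmarkList and FrameAnnotation.
struct Landmark {
  float x, y, z;
};
struct Keypoint2d {
  float x, y;
};

int main() {
  const std::vector<Landmark> landmarks = {{0.10f, 0.20f, -0.05f},
                                           {0.40f, 0.60f, -0.02f}};
  std::vector<Keypoint2d> keypoints;
  for (const auto& lm : landmarks) {
    // Only x and y survive: the landmark z is dropped here, and metric depth
    // is re-estimated later by the Lift2DFrameAnnotationTo3D stage.
    keypoints.push_back({lm.x, lm.y});
  }
  std::printf("%zu keypoints\n", keypoints.size());
}
```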
+// A calculator that converts NormalizedLandmarkList to FrameAnnotation proto.
+class LandmarksToFrameAnnotationCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  void AddLandmarksToFrameAnnotation(const NormalizedLandmarkList& landmarks,
+                                     FrameAnnotation* frame_annotation);
+};
+REGISTER_CALCULATOR(LandmarksToFrameAnnotationCalculator);
+
+absl::Status LandmarksToFrameAnnotationCalculator::GetContract(
+    CalculatorContract* cc) {
+  RET_CHECK(!cc->Inputs().GetTags().empty());
+  RET_CHECK(!cc->Outputs().GetTags().empty());
+
+  if (cc->Inputs().HasTag(kInputLandmarksTag)) {
+    cc->Inputs().Tag(kInputLandmarksTag).Set<NormalizedLandmarkList>();
+  }
+  if (cc->Inputs().HasTag(kInputMultiLandmarksTag)) {
+    cc->Inputs()
+        .Tag(kInputMultiLandmarksTag)
+        .Set<std::vector<NormalizedLandmarkList>>();
+  }
+  if (cc->Outputs().HasTag(kOutputFrameAnnotationTag)) {
+    cc->Outputs().Tag(kOutputFrameAnnotationTag).Set<FrameAnnotation>();
+  }
+  return absl::OkStatus();
+}
+
+absl::Status LandmarksToFrameAnnotationCalculator::Open(CalculatorContext* cc) {
+  cc->SetOffset(TimestampDiff(0));
+  return absl::OkStatus();
+}
+
+absl::Status LandmarksToFrameAnnotationCalculator::Process(
+    CalculatorContext* cc) {
+  auto frame_annotation = absl::make_unique<FrameAnnotation>();
+
+  // Handle the case when the input has only one NormalizedLandmarkList.
+  if (cc->Inputs().HasTag(kInputLandmarksTag) &&
+      !cc->Inputs().Tag(kInputLandmarksTag).IsEmpty()) {
+    const auto& landmarks =
+        cc->Inputs().Tag(kInputLandmarksTag).Get<NormalizedLandmarkList>();
+    AddLandmarksToFrameAnnotation(landmarks, frame_annotation.get());
+  }
+
+  // Handle the case when the input has multiple NormalizedLandmarkLists.
+  if (cc->Inputs().HasTag(kInputMultiLandmarksTag) &&
+      !cc->Inputs().Tag(kInputMultiLandmarksTag).IsEmpty()) {
+    const auto& landmarks_list =
+        cc->Inputs()
+            .Tag(kInputMultiLandmarksTag)
+            .Get<std::vector<NormalizedLandmarkList>>();
+    for (const auto& landmarks : landmarks_list) {
+      AddLandmarksToFrameAnnotation(landmarks, frame_annotation.get());
+    }
+  }
+
+  // Output
+  if (cc->Outputs().HasTag(kOutputFrameAnnotationTag)) {
+    cc->Outputs()
+        .Tag(kOutputFrameAnnotationTag)
+        .Add(frame_annotation.release(), cc->InputTimestamp());
+  }
+  return absl::OkStatus();
+}
+
+void LandmarksToFrameAnnotationCalculator::AddLandmarksToFrameAnnotation(
+    const NormalizedLandmarkList& landmarks,
+    FrameAnnotation* frame_annotation) {
+  auto* new_annotation = frame_annotation->add_annotations();
+  for (const auto& landmark : landmarks.landmark()) {
+    auto* point2d = new_annotation->add_keypoints()->mutable_point_2d();
+    point2d->set_x(landmark.x());
+    point2d->set_y(landmark.y());
+  }
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.cc b/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.cc
new file mode 100644
index 0000000..1405e5a
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.cc
@@ -0,0 +1,169 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include + +#include "Eigen/Dense" +#include "absl/memory/memory.h" +#include "absl/strings/str_format.h" +#include "absl/types/span.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/deps/file_path.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h" +#include "mediapipe/modules/objectron/calculators/decoder.h" +#include "mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.pb.h" +#include "mediapipe/modules/objectron/calculators/tensor_util.h" + +namespace { +constexpr char kInputStreamTag[] = "FRAME_ANNOTATION"; +constexpr char kOutputStreamTag[] = "LIFTED_FRAME_ANNOTATION"; + +// Each detection object will be assigned an unique id that starts from 1. +static int object_id = 0; + +inline int GetNextObjectId() { return ++object_id; } +} // namespace + +namespace mediapipe { + +// Lifted the 2D points in a tracked frame annotation to 3D. +// +// Input: +// FRAME_ANNOTATIONS - Frame annotations with detected 2D points +// Output: +// LIFTED_FRAME_ANNOTATIONS - Result FrameAnnotation with lifted 3D points. +// +// Usage example: +// node { +// calculator: "Lift2DFrameAnnotationTo3DCalculator" +// input_stream: "FRAME_ANNOTATIONS:tracked_annotations" +// output_stream: "LIFTED_FRAME_ANNOTATIONS:lifted_3d_annotations" +// } +class Lift2DFrameAnnotationTo3DCalculator : public CalculatorBase { + public: + static absl::Status GetContract(CalculatorContract* cc); + + absl::Status Open(CalculatorContext* cc) override; + absl::Status Process(CalculatorContext* cc) override; + absl::Status Close(CalculatorContext* cc) override; + + private: + absl::Status ProcessCPU(CalculatorContext* cc, + FrameAnnotation* output_objects); + absl::Status LoadOptions(CalculatorContext* cc); + + // Increment and assign object ID for each detected object. + // In a single MediaPipe session, the IDs are unique. + // Also assign timestamp for the FrameAnnotation to be the input packet + // timestamp. + void AssignObjectIdAndTimestamp(int64 timestamp_us, + FrameAnnotation* annotation); + std::unique_ptr decoder_; + Lift2DFrameAnnotationTo3DCalculatorOptions options_; + Eigen::Matrix projection_matrix_; +}; +REGISTER_CALCULATOR(Lift2DFrameAnnotationTo3DCalculator); + +absl::Status Lift2DFrameAnnotationTo3DCalculator::GetContract( + CalculatorContract* cc) { + RET_CHECK(cc->Inputs().HasTag(kInputStreamTag)); + RET_CHECK(cc->Outputs().HasTag(kOutputStreamTag)); + cc->Inputs().Tag(kInputStreamTag).Set(); + cc->Outputs().Tag(kOutputStreamTag).Set(); + + return absl::OkStatus(); +} + +absl::Status Lift2DFrameAnnotationTo3DCalculator::Open(CalculatorContext* cc) { + cc->SetOffset(TimestampDiff(0)); + MP_RETURN_IF_ERROR(LoadOptions(cc)); + // Load camera intrinsic matrix. 
+ const float fx = options_.normalized_focal_x(); + const float fy = options_.normalized_focal_y(); + const float px = options_.normalized_principal_point_x(); + const float py = options_.normalized_principal_point_y(); + // clang-format off + projection_matrix_ << fx, 0., px, 0., + 0., fy, py, 0., + 0., 0., -1., 0., + 0., 0., -1., 0.; + // clang-format on + decoder_ = absl::make_unique( + BeliefDecoderConfig(options_.decoder_config())); + return absl::OkStatus(); +} + +absl::Status Lift2DFrameAnnotationTo3DCalculator::Process( + CalculatorContext* cc) { + if (cc->Inputs().Tag(kInputStreamTag).IsEmpty()) { + return absl::OkStatus(); + } + + auto output_objects = absl::make_unique(); + + MP_RETURN_IF_ERROR(ProcessCPU(cc, output_objects.get())); + + // Output + if (cc->Outputs().HasTag(kOutputStreamTag)) { + cc->Outputs() + .Tag(kOutputStreamTag) + .Add(output_objects.release(), cc->InputTimestamp()); + } + + return absl::OkStatus(); +} + +absl::Status Lift2DFrameAnnotationTo3DCalculator::ProcessCPU( + CalculatorContext* cc, FrameAnnotation* output_objects) { + const auto& input_frame_annotations = + cc->Inputs().Tag(kInputStreamTag).Get(); + // Copy the input frame annotation to the output + *output_objects = input_frame_annotations; + + auto status = decoder_->Lift2DTo3D(projection_matrix_, /*portrait*/ false, + output_objects); + if (!status.ok()) { + LOG(ERROR) << status; + return status; + } + AssignObjectIdAndTimestamp(cc->InputTimestamp().Microseconds(), + output_objects); + + return absl::OkStatus(); +} + +absl::Status Lift2DFrameAnnotationTo3DCalculator::Close(CalculatorContext* cc) { + return absl::OkStatus(); +} + +absl::Status Lift2DFrameAnnotationTo3DCalculator::LoadOptions( + CalculatorContext* cc) { + // Get calculator options specified in the graph. + options_ = cc->Options(); + + return absl::OkStatus(); +} + +void Lift2DFrameAnnotationTo3DCalculator::AssignObjectIdAndTimestamp( + int64 timestamp_us, FrameAnnotation* annotation) { + for (auto& ann : *annotation->mutable_annotations()) { + ann.set_object_id(GetNextObjectId()); + } + annotation->set_timestamp(timestamp_us); +} + +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.proto b/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.proto new file mode 100644 index 0000000..a3005c1 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.proto @@ -0,0 +1,42 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// The option proto for the Lift2DFrameAnnotationTo3DCalculatorOptions. 
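The focal lengths and principal point consumed above are not in pixels: per the proto below they are normalized by half the image size, with the principal point measured from the image center. A sketch of one plausible conversion from pixel intrinsics; the image size, intrinsic values, and the sign convention of the principal-point offset are assumptions, not taken from this patch:

```cpp
#include <cstdio>

int main() {
  // Hypothetical camera: 640x480 image with pixel-space intrinsics.
  const float width = 640.0f, height = 480.0f;
  const float fx_px = 500.0f, fy_px = 500.0f;  // focal lengths in pixels
  const float cx_px = 320.0f, cy_px = 240.0f;  // principal point in pixels

  // Focal lengths are normalized by half the image size; the principal point
  // is measured from the image center, in the same half-size units.
  const float half_w = width / 2.0f, half_h = height / 2.0f;
  const float normalized_focal_x = fx_px / half_w;
  const float normalized_focal_y = fy_px / half_h;
  const float normalized_principal_point_x = (cx_px - half_w) / half_w;
  const float normalized_principal_point_y = (cy_px - half_h) / half_h;

  // The calculator assembles these into the projection matrix rows
  // (fx 0 px 0; 0 fy py 0; 0 0 -1 0; 0 0 -1 0), as in Open() above.
  std::printf("fx=%.3f fy=%.3f px=%.3f py=%.3f\n", normalized_focal_x,
              normalized_focal_y, normalized_principal_point_x,
              normalized_principal_point_y);
}
```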
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+import "mediapipe/modules/objectron/calculators/belief_decoder_config.proto";
+
+message Lift2DFrameAnnotationTo3DCalculatorOptions {
+  extend CalculatorOptions {
+    optional Lift2DFrameAnnotationTo3DCalculatorOptions ext = 290166284;
+  }
+
+  optional BeliefDecoderConfig decoder_config = 1;
+
+  // Camera focal length along x, normalized by width/2.
+  optional float normalized_focal_x = 2 [default = 1.0];
+
+  // Camera focal length along y, normalized by height/2.
+  optional float normalized_focal_y = 3 [default = 1.0];
+
+  // Camera principal point x, normalized by width/2, origin is image center.
+  optional float normalized_principal_point_x = 4 [default = 0.0];
+
+  // Camera principal point y, normalized by height/2, origin is image center.
+  optional float normalized_principal_point_y = 5 [default = 0.0];
+}
diff --git a/mediapipe/modules/objectron/calculators/model.cc b/mediapipe/modules/objectron/calculators/model.cc
new file mode 100644
index 0000000..40aca39
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/model.cc
@@ -0,0 +1,101 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/modules/objectron/calculators/model.h"
+
+#include "mediapipe/framework/port/logging.h"
+
+namespace mediapipe {
+
+void Model::SetTransformation(const Eigen::Matrix4f& transform) {
+  transformation_ = transform;
+}
+
+void Model::SetTranslation(const Eigen::Vector3f& translation) {
+  transformation_.col(3).template head<3>() = translation;
+}
+
+void Model::SetRotation(float roll, float pitch, float yaw) {
+  // In our coordinate system, Y is up. We first rotate the object around Y
+  // (yaw), then around Z (pitch), and finally around X (roll).
+  Eigen::Matrix3f r;
+  r = Eigen::AngleAxisf(yaw, Eigen::Vector3f::UnitY()) *
+      Eigen::AngleAxisf(pitch, Eigen::Vector3f::UnitZ()) *
+      Eigen::AngleAxisf(roll, Eigen::Vector3f::UnitX());
+  transformation_.topLeftCorner<3, 3>() = r;
+}
+
+void Model::SetRotation(const Eigen::Matrix3f& rotation) {
+  transformation_.topLeftCorner<3, 3>() = rotation;
+}
+
+void Model::SetScale(const Eigen::Vector3f& scale) { scale_ = scale; }
+
+void Model::SetCategory(const std::string& category) { category_ = category; }
+
+const Eigen::Vector3f Model::GetRotationAngles() const {
+  Eigen::Vector3f ypr =
+      transformation_.topLeftCorner<3, 3>().eulerAngles(1, 2, 0);
+  return Eigen::Vector3f(ypr(2), ypr(1), ypr(0));  // swap YPR with RPY
+}
+
+const Eigen::Matrix4f& Model::GetTransformation() const {
+  return transformation_;
+}
+
+const Eigen::Vector3f& Model::GetScale() const { return scale_; }
+
+const Eigen::Ref<const Eigen::Vector3f> Model::GetTranslation() const {
+  return transformation_.col(3).template head<3>();
+}
+
+const Eigen::Ref<const Eigen::Matrix3f> Model::GetRotation() const {
+  return transformation_.template topLeftCorner<3, 3>();
+}
+
+const std::string& Model::GetCategory() const { return category_; }
+
+void Model::Deserialize(const Object& obj) {
+  CHECK_EQ(obj.rotation_size(), 9);
+  CHECK_EQ(obj.translation_size(), 3);
+  CHECK_EQ(obj.scale_size(), 3);
+  category_ = obj.category();
+
+  using RotationMatrix = Eigen::Matrix<float, 3, 3, Eigen::RowMajor>;
+  transformation_.setIdentity();
+  transformation_.topLeftCorner<3, 3>() =
+      Eigen::Map<const RotationMatrix>(obj.rotation().data());
+  transformation_.col(3).head<3>() =
+      Eigen::Map<const Eigen::Vector3f>(obj.translation().data());
+  scale_ = Eigen::Map<const Eigen::Vector3f>(obj.scale().data());
+  Update();
+}
+
+void Model::Serialize(Object* obj) {
+  obj->set_category(category_);
+  for (int i = 0; i < 3; ++i) {
+    for (int j = 0; j < 3; ++j) {
+      obj->add_rotation(transformation_(i, j));
+    }
+  }
+
+  for (int i = 0; i < 3; ++i) {
+    obj->add_translation(transformation_(i, 3));
+  }
+
+  for (int i = 0; i < 3; ++i) {
+    obj->add_scale(scale_[i]);
+  }
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/modules/objectron/calculators/model.h b/mediapipe/modules/objectron/calculators/model.h
new file mode 100644
index 0000000..72b5eb2
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/model.h
@@ -0,0 +1,92 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_MODEL_H_
+#define MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_MODEL_H_
+
+#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h"
+#include "mediapipe/modules/objectron/calculators/object.pb.h"
+#include "mediapipe/modules/objectron/calculators/types.h"
+
+namespace mediapipe {
+
+class Model {
+ public:
+  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
+
+  enum Type {
+    kVisualizationOnly = 0,
+    kBoundingBox,
+    kSkeleton,
+    kShape,  // Shape is a virtual object.
+    kNumModes,
+  };
+
+  virtual ~Model() = default;
+
+  virtual void SetTransformation(const Eigen::Matrix4f& transform);
+  virtual void SetTranslation(const Eigen::Vector3f& translation);
+
+  // Computes the rotation matrix from these angles and updates the
+  // transformation matrix accordingly.
+  virtual void SetRotation(float roll, float pitch, float yaw);
+  virtual void SetRotation(const Eigen::Matrix3f& rotation);
+  virtual void SetScale(const Eigen::Vector3f& scale);
+  virtual void SetCategory(const std::string& category);
+  virtual size_t GetNumberKeypoints() const { return number_keypoints_; }
+
+  // Gets Euler angles in the order of roll, pitch, yaw.
+  virtual const Eigen::Vector3f GetRotationAngles() const;
+  virtual const Eigen::Matrix4f& GetTransformation() const;
+  virtual const Eigen::Vector3f& GetScale() const;
+  virtual const Eigen::Ref<const Eigen::Vector3f> GetTranslation() const;
+  virtual const Eigen::Ref<const Eigen::Matrix3f> GetRotation() const;
+  virtual const std::string& GetCategory() const;
+
+  // Updates the model's keypoints in the world-coordinate system.
+  // The update includes transforming the model to the world-coordinate system
+  // as well as scaling the model.
+  // The user is expected to call this function after setting the rotation,
+  // orientation or the scale of the model to get an updated model.
+  virtual void Update() = 0;
+
+  // Updates the model's parameters (orientation, position, and scale) from
+  // the user-provided variables.
+  virtual void Adjust(const std::vector<float>& variables) = 0;
+
+  // Returns a pointer to the model's keypoints.
+  // Use Eigen::Map to cast the pointer back to Vector3 or Vector4.
+  virtual const float* GetVertex(size_t id) const = 0;
+  virtual float* GetVertex(size_t id) = 0;
+  virtual void Deserialize(const Object& obj);
+  virtual void Serialize(Object* obj);
+
+  // TODO: make member variables protected, and add public apis.
+  // 4x4 transformation matrix mapping the first keypoint to world coordinate.
+  Eigen::Matrix4f transformation_;
+  Eigen::Vector3f scale_;  // width, height, depth
+  Type model_type_;
+  size_t number_keypoints_;
+  std::string category_;
+
+ protected:
+  Model(Type type, size_t number_keypoints, const std::string& category)
+      : model_type_(type),
+        number_keypoints_(number_keypoints),
+        category_(category) {}
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_MODEL_H_
diff --git a/mediapipe/modules/objectron/calculators/object.proto b/mediapipe/modules/objectron/calculators/object.proto
new file mode 100644
index 0000000..a07e83f
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/object.proto
@@ -0,0 +1,124 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package mediapipe;
+
+message KeyPoint {
+  // The position of the keypoint in the local coordinate system of the rigid
+  // object.
+  float x = 1;
+  float y = 2;
+  float z = 3;
+
+  // Sphere around the keypoint, indicating the annotator's confidence of the
+  // position in meters.
+  float confidence_radius = 4;
+
+  // The name of the keypoint (e.g. legs, head, etc.).
+  // Does not have to be unique.
+  string name = 5;
+
+  // Indicates whether the keypoint is hidden or not.
+  bool hidden = 6;
+}
+
+message Object {
+  // Unique object id through a sequence. There might be multiple objects of
+  // the same label in this sequence.
+  int32 id = 1;
+
+  // Describes what category an object is. E.g. object class, attribute,
+  // instance or person identity. This provides additional context for the
+  // object type.
+  string category = 2;
+
+  enum Type {
+    UNDEFINED_TYPE = 0;
+    BOUNDING_BOX = 1;
+    SKELETON = 2;
+  }
+
+  Type type = 3;
+
+  // 3x3 row-major rotation matrix describing the orientation of the rigid
+  // object's frame of reference in the world-coordinate system.
+  repeated float rotation = 4;
+
+  // 3x1 vector describing the translation of the rigid object's frame of
+  // reference in the world-coordinate system in meters.
+  repeated float translation = 5;
+
+  // 3x1 vector describing the scale of the rigid object's frame of reference
+  // in the world-coordinate system in meters.
+  repeated float scale = 6;
+
+  // List of all the key points associated with this object in the object
+  // coordinate system.
+  // The first keypoint is always the object's frame of reference,
+  // e.g. the centroid of the box.
+  // E.g. for a bounding box with its center as the frame of reference, the 9
+  // keypoints are:
+  //   {0., 0., 0.},
+  //   {-.5, -.5, -.5}, {-.5, -.5, +.5}, {-.5, +.5, -.5}, {-.5, +.5, +.5},
+  //   {+.5, -.5, -.5}, {+.5, -.5, +.5}, {+.5, +.5, -.5}, {+.5, +.5, +.5}
+  // To get the bounding box in the world-coordinate system, we first scale
+  // the box, then transform the scaled box.
+  // For example, the bounding box in the world coordinate system is
+  //   rotation * scale * keypoints + translation
+  repeated KeyPoint keypoints = 7;
+
+  // Enum to reflect how this object is created.
+  enum Method {
+    UNKNOWN_METHOD = 0;
+    ANNOTATION = 1;    // Created by data annotation.
+    AUGMENTATION = 2;  // Created by data augmentation.
+  }
+  Method method = 8;
+}
+
+// The edge connecting two keypoints together.
+message Edge {
+  // keypoint id of the edge's source
+  int32 source = 1;
+
+  // keypoint id of the edge's sink
+  int32 sink = 2;
+}
+
+// The skeleton template for different objects (e.g. humans, chairs, hands,
+// etc.). The annotation tool reads the skeleton template dictionary.
+message Skeleton {
+  // The origin keypoint in the object coordinate system. (i.e. Point 0, 0, 0)
+  int32 reference_keypoint = 1;
+
+  // The skeleton's category (e.g. human, chair, hand). Should be unique in
+  // the dictionary.
+  string category = 2;
+
+  // Initialization value for all the keypoints in the skeleton in the
+  // object's local coordinate system. Pursuit will transform these points
+  // using the object's transformation to get the keypoints in the
+  // world-coordinate system.
+  repeated KeyPoint keypoints = 3;
+
+  // List of edges connecting keypoints.
+  repeated Edge edges = 4;
+}
+
+// The list of all the modeled skeletons in our library. These models can be
+// objects (chairs, desks, etc), humans (full pose, hands, faces, etc), or
+// boxes. We can have multiple skeletons in the same file.
+message Skeletons {
+  repeated Skeleton object = 1;
+}
diff --git a/mediapipe/modules/objectron/calculators/tensor_util.cc b/mediapipe/modules/objectron/calculators/tensor_util.cc
new file mode 100644
index 0000000..0004edd
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/tensor_util.cc
@@ -0,0 +1,48 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "mediapipe/modules/objectron/calculators/tensor_util.h"
+
+#include "mediapipe/framework/port/logging.h"
+
+namespace mediapipe {
+
+cv::Mat ConvertTfliteTensorToCvMat(const TfLiteTensor& tensor) {
+  // Check the tensor is BxHxWxC (dims size == 4) and the batch size is one
+  // (dims->data[0] == 1).
+  CHECK(tensor.dims->size == 4 && tensor.dims->data[0] == 1);
+  CHECK_EQ(kTfLiteFloat32, tensor.type) << "tflite_tensor type is not float";
+
+  const size_t num_output_channels = tensor.dims->data[3];
+  const int dims = 2;
+  const int sizes[] = {tensor.dims->data[1], tensor.dims->data[2]};
+  const int type = CV_MAKETYPE(CV_32F, num_output_channels);
+  return cv::Mat(dims, sizes, type, reinterpret_cast<void*>(tensor.data.f));
+}
+
+cv::Mat ConvertTensorToCvMat(const mediapipe::Tensor& tensor) {
+  // Check the tensor is BxHxWxC (dims size == 4) and the batch size is one
+  // (dims[0] == 1).
+  CHECK(tensor.shape().dims.size() == 4 && tensor.shape().dims[0] == 1);
+  CHECK_EQ(mediapipe::Tensor::ElementType::kFloat32 == tensor.element_type(),
+           true)
+      << "tensor type is not float";
+
+  const size_t num_output_channels = tensor.shape().dims[3];
+  const int dims = 2;
+  const int sizes[] = {tensor.shape().dims[1], tensor.shape().dims[2]};
+  const int type = CV_MAKETYPE(CV_32F, num_output_channels);
+  auto cpu_view = tensor.GetCpuReadView();
+  return cv::Mat(dims, sizes, type,
+                 const_cast<float*>(cpu_view.buffer<float>()));
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/modules/objectron/calculators/tensor_util.h b/mediapipe/modules/objectron/calculators/tensor_util.h
new file mode 100644
index 0000000..0b26209
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/tensor_util.h
@@ -0,0 +1,31 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
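+
+// Usage sketch (illustrative only; the stream tag and tensor layout are
+// assumptions, not part of this header): inside a calculator's Process(),
+// the first output tensor of a model can be viewed as a cv::Mat without
+// copying, e.g.
+//
+//   const auto& tensors =
+//       cc->Inputs().Tag("TENSORS").Get<std::vector<TfLiteTensor>>();
+//   cv::Mat heatmap = ConvertTfliteTensorToCvMat(tensors[0]);
+//   // heatmap now views the tensor's float data as an HxW, C-channel Mat.
+//
+// The returned Mat does not own the buffer, so it must not outlive the
+// tensor it was created from.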
+
+#ifndef MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_TENSOR_UTIL_H_
+#define MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_TENSOR_UTIL_H_
+
+#include "mediapipe/framework/formats/tensor.h"
+#include "mediapipe/framework/port/opencv_core_inc.h"
+#include "tensorflow/lite/interpreter.h"
+
+namespace mediapipe {
+
+// Converts a tflite tensor to a cv::Mat with one Mat channel per tensor
+// channel (e.g. a single-channel tensor becomes a grayscale image).
+cv::Mat ConvertTfliteTensorToCvMat(const TfLiteTensor& tensor);
+
+// Converts a MediaPipe tensor to a cv::Mat in the same way.
+cv::Mat ConvertTensorToCvMat(const mediapipe::Tensor& tensor);
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_TENSOR_UTIL_H_
diff --git a/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc b/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc
new file mode 100644
index 0000000..6989c34
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc
@@ -0,0 +1,209 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "Eigen/Dense"
+#include "absl/memory/memory.h"
+#include "absl/strings/str_format.h"
+#include "absl/types/span.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/deps/file_path.h"
+#include "mediapipe/framework/formats/tensor.h"
+#include "mediapipe/framework/port/opencv_core_inc.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h"
+#include "mediapipe/modules/objectron/calculators/belief_decoder_config.pb.h"
+#include "mediapipe/modules/objectron/calculators/decoder.h"
+#include "mediapipe/modules/objectron/calculators/tensor_util.h"
+#include "mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.pb.h"
+
+namespace {
+constexpr char kInputStreamTag[] = "TENSORS";
+constexpr char kOutputStreamTag[] = "ANNOTATIONS";
+
+// Each detection object will be assigned a unique id that starts from 1.
+static int object_id = 0;
+
+inline int GetNextObjectId() { return ++object_id; }
+}  // namespace
+
+namespace mediapipe {
+
+// Converts result Tensors from the deep pursuit 3d model into
+// FrameAnnotation.
+//
+// Input:
+//  TENSORS - Vector of Tensor of type kFloat32.
+// Output:
+//  ANNOTATIONS - Result FrameAnnotation.
+//
+// Usage example:
+// node {
+//   calculator: "TensorsToObjectsCalculator"
+//   input_stream: "TENSORS:tensors"
+//   output_stream: "ANNOTATIONS:annotations"
+// }
+class TensorsToObjectsCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc);
+
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+  absl::Status Close(CalculatorContext* cc) override;
+
+ private:
+  absl::Status ProcessCPU(CalculatorContext* cc,
+                          FrameAnnotation* output_objects);
+  absl::Status LoadOptions(CalculatorContext* cc);
+  // Takes point_3d in FrameAnnotation, projects to 2D, and overwrites the
+  // point_2d field with the projection.
+  void Project3DTo2D(bool portrait, FrameAnnotation* annotation) const;
+  // Increments and assigns an object ID for each detected object.
+  // In a single MediaPipe session, the IDs are unique.
+  // Also assigns the input packet timestamp to the FrameAnnotation.
+  void AssignObjectIdAndTimestamp(int64 timestamp_us,
+                                  FrameAnnotation* annotation);
+
+  int num_classes_ = 0;
+  int num_keypoints_ = 0;
+
+  ::mediapipe::TensorsToObjectsCalculatorOptions options_;
+  std::unique_ptr<Decoder> decoder_;
+  Eigen::Matrix<float, 4, 4, Eigen::RowMajor> projection_matrix_;
+};
+REGISTER_CALCULATOR(TensorsToObjectsCalculator);
+
+absl::Status TensorsToObjectsCalculator::GetContract(CalculatorContract* cc) {
+  RET_CHECK(!cc->Inputs().GetTags().empty());
+  RET_CHECK(!cc->Outputs().GetTags().empty());
+
+  if (cc->Inputs().HasTag(kInputStreamTag)) {
+    cc->Inputs().Tag(kInputStreamTag).Set<std::vector<Tensor>>();
+  }
+
+  if (cc->Outputs().HasTag(kOutputStreamTag)) {
+    cc->Outputs().Tag(kOutputStreamTag).Set<FrameAnnotation>();
+  }
+  return absl::OkStatus();
+}
+
+absl::Status TensorsToObjectsCalculator::Open(CalculatorContext* cc) {
+  MP_RETURN_IF_ERROR(LoadOptions(cc));
+  // clang-format off
+  projection_matrix_ << 1.5731,  0,       0,       0,
+                        0,       2.0975,  0,       0,
+                        0,       0,      -1.0002, -0.2,
+                        0,       0,      -1,       0;
+  // clang-format on
+  decoder_ = absl::make_unique<Decoder>(
+      BeliefDecoderConfig(options_.decoder_config()));
+
+  return absl::OkStatus();
+}
+
+absl::Status TensorsToObjectsCalculator::Process(CalculatorContext* cc) {
+  if (cc->Inputs().Tag(kInputStreamTag).IsEmpty()) {
+    return absl::OkStatus();
+  }
+
+  auto output_objects = absl::make_unique<FrameAnnotation>();
+
+  MP_RETURN_IF_ERROR(ProcessCPU(cc, output_objects.get()));
+
+  // Output
+  if (cc->Outputs().HasTag(kOutputStreamTag)) {
+    cc->Outputs()
+        .Tag(kOutputStreamTag)
+        .Add(output_objects.release(), cc->InputTimestamp());
+  }
+
+  return absl::OkStatus();
+}
+
+absl::Status TensorsToObjectsCalculator::ProcessCPU(
+    CalculatorContext* cc, FrameAnnotation* output_objects) {
+  const auto& input_tensors =
+      cc->Inputs().Tag(kInputStreamTag).Get<std::vector<Tensor>>();
+
+  cv::Mat prediction_heatmap = ConvertTensorToCvMat(input_tensors[0]);
+  cv::Mat offsetmap = ConvertTensorToCvMat(input_tensors[1]);
+
+  *output_objects =
+      decoder_->DecodeBoundingBoxKeypoints(prediction_heatmap, offsetmap);
+  auto status = decoder_->Lift2DTo3D(projection_matrix_, /*portrait*/ true,
+                                     output_objects);
+  if (!status.ok()) {
+    LOG(ERROR) << status;
+    return status;
+  }
+  Project3DTo2D(/*portrait*/ true, output_objects);
+  AssignObjectIdAndTimestamp(cc->InputTimestamp().Microseconds(),
+                             output_objects);
+
+  return absl::OkStatus();
+}
+
+absl::Status TensorsToObjectsCalculator::Close(CalculatorContext* cc) {
+  return absl::OkStatus();
+}
+
+absl::Status TensorsToObjectsCalculator::LoadOptions(CalculatorContext* cc) {
+  // Get calculator options specified in the graph.
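+  // For reference, a node using this calculator typically carries options of
+  // the following shape (values are illustrative, mirroring the 1-stage
+  // Objectron graphs added elsewhere in this change):
+  //
+  //   options: {
+  //     [mediapipe.TensorsToObjectsCalculatorOptions.ext] {
+  //       num_classes: 1
+  //       num_keypoints: 9
+  //       decoder_config { heatmap_threshold: 0.6 local_max_distance: 2 }
+  //     }
+  //   }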
+ options_ = cc->Options<::mediapipe::TensorsToObjectsCalculatorOptions>(); + + num_classes_ = options_.num_classes(); + num_keypoints_ = options_.num_keypoints(); + + // Currently only support 2D when num_values_per_keypoint equals to 2. + CHECK_EQ(options_.num_values_per_keypoint(), 2); + + return absl::OkStatus(); +} + +void TensorsToObjectsCalculator::Project3DTo2D( + bool portrait, FrameAnnotation* annotation) const { + for (auto& ann : *annotation->mutable_annotations()) { + for (auto& key_point : *ann.mutable_keypoints()) { + Eigen::Vector4f point3d; + point3d << key_point.point_3d().x(), key_point.point_3d().y(), + key_point.point_3d().z(), 1.0f; + Eigen::Vector4f point3d_projection = projection_matrix_ * point3d; + float u, v; + const float inv_w = 1.0f / point3d_projection(3); + if (portrait) { + u = (point3d_projection(1) * inv_w + 1.0f) * 0.5f; + v = (point3d_projection(0) * inv_w + 1.0f) * 0.5f; + } else { + u = (point3d_projection(0) * inv_w + 1.0f) * 0.5f; + v = (1.0f - point3d_projection(1) * inv_w) * 0.5f; + } + key_point.mutable_point_2d()->set_x(u); + key_point.mutable_point_2d()->set_y(v); + } + } +} + +void TensorsToObjectsCalculator::AssignObjectIdAndTimestamp( + int64 timestamp_us, FrameAnnotation* annotation) { + for (auto& ann : *annotation->mutable_annotations()) { + ann.set_object_id(GetNextObjectId()); + } + annotation->set_timestamp(timestamp_us); +} + +} // namespace mediapipe diff --git a/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.proto b/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.proto new file mode 100644 index 0000000..8d46fce --- /dev/null +++ b/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.proto @@ -0,0 +1,39 @@ +// Copyright 2020 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// The option proto for the TensorsToObjectsCalculatorOptions. + +syntax = "proto2"; + +package mediapipe; + +import "mediapipe/framework/calculator.proto"; +import "mediapipe/modules/objectron/calculators/belief_decoder_config.proto"; + +message TensorsToObjectsCalculatorOptions { + extend CalculatorOptions { + optional TensorsToObjectsCalculatorOptions ext = 334361940; + } + + // The number of output classes predicted by the detection model. + optional int32 num_classes = 1; + + // The number of predicted keypoints. + optional int32 num_keypoints = 2; + // The dimension of each keypoint, e.g. number of values predicted for each + // keypoint. + optional int32 num_values_per_keypoint = 3 [default = 2]; + + optional BeliefDecoderConfig decoder_config = 4; +} diff --git a/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc b/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc new file mode 100644 index 0000000..e3686f6 --- /dev/null +++ b/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc @@ -0,0 +1,217 @@ +// Copyright 2020 The MediaPipe Authors. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "Eigen/Dense"
+#include "absl/memory/memory.h"
+#include "absl/strings/str_format.h"
+#include "absl/types/span.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/deps/file_path.h"
+#include "mediapipe/framework/port/opencv_core_inc.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/modules/objectron/calculators/annotation_data.pb.h"
+#include "mediapipe/modules/objectron/calculators/belief_decoder_config.pb.h"
+#include "mediapipe/modules/objectron/calculators/decoder.h"
+#include "mediapipe/modules/objectron/calculators/tensor_util.h"
+#include "mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.pb.h"
+#include "tensorflow/lite/interpreter.h"
+
+namespace {
+constexpr char kInputStreamTag[] = "TENSORS";
+constexpr char kOutputStreamTag[] = "ANNOTATIONS";
+
+// Each detection object will be assigned a unique id that starts from 1.
+static int object_id = 0;
+
+inline int GetNextObjectId() { return ++object_id; }
+}  // namespace
+
+namespace mediapipe {
+
+// Converts result TFLite tensors from the deep pursuit 3d model into
+// FrameAnnotation.
+//
+// Input:
+//  TENSORS - Vector of TfLiteTensor of type kTfLiteFloat32.
+// Output:
+//  ANNOTATIONS - Result FrameAnnotation.
+//
+// Usage example:
+// node {
+//   calculator: "TfLiteTensorsToObjectsCalculator"
+//   input_stream: "TENSORS:tensors"
+//   output_stream: "ANNOTATIONS:annotations"
+// }
+class TfLiteTensorsToObjectsCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc);
+
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+  absl::Status Close(CalculatorContext* cc) override;
+
+ private:
+  absl::Status ProcessCPU(CalculatorContext* cc,
+                          FrameAnnotation* output_objects);
+  absl::Status LoadOptions(CalculatorContext* cc);
+  // Takes point_3d in FrameAnnotation, projects to 2D, and overwrites the
+  // point_2d field with the projection.
+  void Project3DTo2D(bool portrait, FrameAnnotation* annotation) const;
+  // Increments and assigns an object ID for each detected object.
+  // In a single MediaPipe session, the IDs are unique.
+  // Also assigns the input packet timestamp to the FrameAnnotation.
+  void AssignObjectIdAndTimestamp(int64 timestamp_us,
+                                  FrameAnnotation* annotation);
+
+  int num_classes_ = 0;
+  int num_keypoints_ = 0;
+
+  ::mediapipe::TfLiteTensorsToObjectsCalculatorOptions options_;
+  std::unique_ptr<Decoder> decoder_;
+  Eigen::Matrix<float, 4, 4, Eigen::RowMajor> projection_matrix_;
+};
+REGISTER_CALCULATOR(TfLiteTensorsToObjectsCalculator);
+
+absl::Status TfLiteTensorsToObjectsCalculator::GetContract(
+    CalculatorContract* cc) {
+  RET_CHECK(!cc->Inputs().GetTags().empty());
+  RET_CHECK(!cc->Outputs().GetTags().empty());
+
+  if (cc->Inputs().HasTag(kInputStreamTag)) {
+    cc->Inputs().Tag(kInputStreamTag).Set<std::vector<TfLiteTensor>>();
+  }
+
+  if (cc->Outputs().HasTag(kOutputStreamTag)) {
+    cc->Outputs().Tag(kOutputStreamTag).Set<FrameAnnotation>();
+  }
+  return absl::OkStatus();
+}
+
+absl::Status TfLiteTensorsToObjectsCalculator::Open(CalculatorContext* cc) {
+  MP_RETURN_IF_ERROR(LoadOptions(cc));
+  // Load camera intrinsic matrix.
+  const float fx = options_.normalized_focal_x();
+  const float fy = options_.normalized_focal_y();
+  const float px = options_.normalized_principal_point_x();
+  const float py = options_.normalized_principal_point_y();
+  // clang-format off
+  projection_matrix_ << fx, 0.,  px, 0.,
+                        0., fy,  py, 0.,
+                        0., 0., -1., 0.,
+                        0., 0., -1., 0.;
+  // clang-format on
+  decoder_ = absl::make_unique<Decoder>(
+      BeliefDecoderConfig(options_.decoder_config()));
+
+  return absl::OkStatus();
+}
+
+absl::Status TfLiteTensorsToObjectsCalculator::Process(CalculatorContext* cc) {
+  if (cc->Inputs().Tag(kInputStreamTag).IsEmpty()) {
+    return absl::OkStatus();
+  }
+
+  auto output_objects = absl::make_unique<FrameAnnotation>();
+
+  MP_RETURN_IF_ERROR(ProcessCPU(cc, output_objects.get()));
+
+  // Output
+  if (cc->Outputs().HasTag(kOutputStreamTag)) {
+    cc->Outputs()
+        .Tag(kOutputStreamTag)
+        .Add(output_objects.release(), cc->InputTimestamp());
+  }
+
+  return absl::OkStatus();
+}
+
+absl::Status TfLiteTensorsToObjectsCalculator::ProcessCPU(
+    CalculatorContext* cc, FrameAnnotation* output_objects) {
+  const auto& input_tensors =
+      cc->Inputs().Tag(kInputStreamTag).Get<std::vector<TfLiteTensor>>();
+
+  cv::Mat prediction_heatmap = ConvertTfliteTensorToCvMat(input_tensors[0]);
+  cv::Mat offsetmap = ConvertTfliteTensorToCvMat(input_tensors[1]);
+
+  *output_objects =
+      decoder_->DecodeBoundingBoxKeypoints(prediction_heatmap, offsetmap);
+  auto status = decoder_->Lift2DTo3D(projection_matrix_, /*portrait*/ true,
+                                     output_objects);
+  if (!status.ok()) {
+    LOG(ERROR) << status;
+    return status;
+  }
+  Project3DTo2D(/*portrait*/ true, output_objects);
+  AssignObjectIdAndTimestamp(cc->InputTimestamp().Microseconds(),
+                             output_objects);
+
+  return absl::OkStatus();
+}
+
+absl::Status TfLiteTensorsToObjectsCalculator::Close(CalculatorContext* cc) {
+  return absl::OkStatus();
+}
+
+absl::Status TfLiteTensorsToObjectsCalculator::LoadOptions(
+    CalculatorContext* cc) {
+  // Get calculator options specified in the graph.
+  options_ =
+      cc->Options<::mediapipe::TfLiteTensorsToObjectsCalculatorOptions>();
+
+  num_classes_ = options_.num_classes();
+  num_keypoints_ = options_.num_keypoints();
+
+  // Currently only 2D is supported, i.e. num_values_per_keypoint equals 2.
+  CHECK_EQ(options_.num_values_per_keypoint(), 2);
+
+  return absl::OkStatus();
+}
+
+void TfLiteTensorsToObjectsCalculator::Project3DTo2D(
+    bool portrait, FrameAnnotation* annotation) const {
+  for (auto& ann : *annotation->mutable_annotations()) {
+    for (auto& key_point : *ann.mutable_keypoints()) {
+      Eigen::Vector4f point3d;
+      point3d << key_point.point_3d().x(), key_point.point_3d().y(),
+          key_point.point_3d().z(), 1.0f;
+      Eigen::Vector4f point3d_projection = projection_matrix_ * point3d;
+      float u, v;
+      const float inv_w = 1.0f / point3d_projection(3);
+      if (portrait) {
+        u = (point3d_projection(1) * inv_w + 1.0f) * 0.5f;
+        v = (point3d_projection(0) * inv_w + 1.0f) * 0.5f;
+      } else {
+        u = (point3d_projection(0) * inv_w + 1.0f) * 0.5f;
+        v = (1.0f - point3d_projection(1) * inv_w) * 0.5f;
+      }
+      key_point.mutable_point_2d()->set_x(u);
+      key_point.mutable_point_2d()->set_y(v);
+    }
+  }
+}
+
+void TfLiteTensorsToObjectsCalculator::AssignObjectIdAndTimestamp(
+    int64 timestamp_us, FrameAnnotation* annotation) {
+  for (auto& ann : *annotation->mutable_annotations()) {
+    ann.set_object_id(GetNextObjectId());
+  }
+  annotation->set_timestamp(timestamp_us);
+}
+
+}  // namespace mediapipe
diff --git a/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.proto b/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.proto
new file mode 100644
index 0000000..32520d9
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.proto
@@ -0,0 +1,51 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// The option proto for the TfLiteTensorsToObjectsCalculatorOptions.
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+import "mediapipe/modules/objectron/calculators/belief_decoder_config.proto";
+
+message TfLiteTensorsToObjectsCalculatorOptions {
+  extend CalculatorOptions {
+    optional TfLiteTensorsToObjectsCalculatorOptions ext = 263667646;
+  }
+
+  // The number of output classes predicted by the detection model.
+  optional int32 num_classes = 1;
+
+  // The number of predicted keypoints.
+  optional int32 num_keypoints = 2;
+  // The dimension of each keypoint, e.g. number of values predicted for each
+  // keypoint.
+  optional int32 num_values_per_keypoint = 3 [default = 2];
+
+  optional BeliefDecoderConfig decoder_config = 4;
+
+  // Camera focal length along x, normalized by width/2.
+  optional float normalized_focal_x = 5 [default = 1.0];
+
+  // Camera focal length along y, normalized by height/2.
+  optional float normalized_focal_y = 6 [default = 1.0];
+
+  // Camera principal point x, normalized by width/2, origin is image center.
+  optional float normalized_principal_point_x = 7 [default = 0.0];
+
+  // Camera principal point y, normalized by height/2, origin is image center.
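+  // (For illustration, in terms of pixel-space intrinsics: if the focal
+  // length is fy and the principal point is cy for an image of height h,
+  // then normalized_focal_y = fy / (h / 2) and
+  // normalized_principal_point_y = (cy - h / 2) / (h / 2).)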
+  optional float normalized_principal_point_y = 8 [default = 0.0];
+}
diff --git a/mediapipe/modules/objectron/calculators/types.h b/mediapipe/modules/objectron/calculators/types.h
new file mode 100644
index 0000000..dcc477d
--- /dev/null
+++ b/mediapipe/modules/objectron/calculators/types.h
@@ -0,0 +1,56 @@
+// Copyright 2020 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_TYPES_H_
+#define MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_TYPES_H_
+
+#include <array>
+
+#include "Eigen/Geometry"
+
+namespace mediapipe {
+
+using Eigen::Map;
+using Eigen::Vector2f;
+using Eigen::Vector3f;
+using Eigen::Vector4f;
+using Matrix4f_RM = Eigen::Matrix<float, 4, 4, Eigen::RowMajor>;
+using Matrix3f_RM = Eigen::Matrix<float, 3, 3, Eigen::RowMajor>;
+
+using Face = std::array<int, 3>;
+
+struct SuperPoint {
+  enum PointSourceType { kPointCloud = 0, kBoundingBox = 1, kSkeleton = 2 };
+  // The id of the point in the point-cloud.
+  int reference_point;
+  // The source of the point.
+  PointSourceType source;
+  // The id of the point in the set of points in the current frame.
+  int id;
+  // If source is kBoundingBox or kSkeleton, object_id stores the id of the
+  // object this point belongs to.
+  int object_id;
+  // Projected u-v value.
+  Vector2f uv;
+  Vector2f pixel;
+  // The 3D point.
+  Vector3f point_3d;
+  // Color.
+  Eigen::Matrix<uint8_t, 3, 1> color;
+  bool rendered;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_MODULES_OBJECTRON_CALCULATORS_TYPES_H_
diff --git a/mediapipe/modules/objectron/object_detection_3d_camera.tflite b/mediapipe/modules/objectron/object_detection_3d_camera.tflite
new file mode 100644
index 0000000..14cb826
Binary files /dev/null and b/mediapipe/modules/objectron/object_detection_3d_camera.tflite differ
diff --git a/mediapipe/modules/objectron/object_detection_3d_chair.tflite b/mediapipe/modules/objectron/object_detection_3d_chair.tflite
new file mode 100644
index 0000000..3a23dfd
Binary files /dev/null and b/mediapipe/modules/objectron/object_detection_3d_chair.tflite differ
diff --git a/mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite b/mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite
new file mode 100644
index 0000000..718dc97
Binary files /dev/null and b/mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite differ
diff --git a/mediapipe/modules/objectron/object_detection_3d_cup.tflite b/mediapipe/modules/objectron/object_detection_3d_cup.tflite
new file mode 100644
index 0000000..1a7a5d3
Binary files /dev/null and b/mediapipe/modules/objectron/object_detection_3d_cup.tflite differ
diff --git a/mediapipe/modules/objectron/object_detection_3d_sneakers.tflite b/mediapipe/modules/objectron/object_detection_3d_sneakers.tflite
new file mode 100644
index 0000000..d64234d
Binary files /dev/null and b/mediapipe/modules/objectron/object_detection_3d_sneakers.tflite differ
diff --git a/mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite b/mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite
new file mode 100644
index
0000000..2077114 Binary files /dev/null and b/mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite differ diff --git a/mediapipe/modules/objectron/object_detection_oid_v4_cpu.pbtxt b/mediapipe/modules/objectron/object_detection_oid_v4_cpu.pbtxt new file mode 100644 index 0000000..f7a09fc --- /dev/null +++ b/mediapipe/modules/objectron/object_detection_oid_v4_cpu.pbtxt @@ -0,0 +1,134 @@ +# MediaPipe Objectron object detection CPU subgraph. + +type: "ObjectDetectionOidV4Subgraph" + +input_stream: "IMAGE:input_video" +input_side_packet: "LABELS_CSV:allowed_labels" +output_stream: "DETECTIONS:detections" + +# Crops, resizes, and converts the input video into tensor. +# Preserves aspect ratio of the images. +node { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE:input_video" + output_stream: "TENSORS:image_tensor" + output_stream: "LETTERBOX_PADDING:letterbox_padding" + options { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 300 + output_tensor_height: 300 + keep_aspect_ratio: false + output_tensor_float_range { + min: -1.0 + max: 1.0 + } + } + } +} + +# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:image_tensor" + output_stream: "TENSORS:detection_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + model_path: "mediapipe/modules/objectron/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite" + delegate { xnnpack {} } + } + } +} + +# Generates a single side packet containing a vector of SSD anchors based on +# the specification in the options. +node { + calculator: "SsdAnchorsCalculator" + output_side_packet: "anchors" + options: { + [mediapipe.SsdAnchorsCalculatorOptions.ext] { + num_layers: 6 + min_scale: 0.2 + max_scale: 0.95 + input_size_height: 300 + input_size_width: 300 + anchor_offset_x: 0.5 + anchor_offset_y: 0.5 + strides: 16 + strides: 32 + strides: 64 + strides: 128 + strides: 256 + strides: 512 + aspect_ratios: 1.0 + aspect_ratios: 2.0 + aspect_ratios: 0.5 + aspect_ratios: 3.0 + aspect_ratios: 0.3333 + reduce_boxes_in_lowest_layer: true + } + } +} + +# Decodes the detection tensors generated by the TensorFlow Lite model, based on +# the SSD anchors and the specification in the options, into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "TensorsToDetectionsCalculator" + input_stream: "TENSORS:detection_tensors" + input_side_packet: "ANCHORS:anchors" + output_stream: "DETECTIONS:all_detections" + options: { + [mediapipe.TensorsToDetectionsCalculatorOptions.ext] { + num_classes: 24 + num_boxes: 1917 + num_coords: 4 + ignore_classes: 0 + sigmoid_score: true + apply_exponential_on_box_size: true + x_scale: 10.0 + y_scale: 10.0 + h_scale: 5.0 + w_scale: 5.0 + min_score_thresh: 0.5 + } + } +} + +# Maps detection label IDs to the corresponding label text. The label map is +# provided in the label_map_path option. +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "all_detections" + output_stream: "labeled_detections" + options: { + [mediapipe.DetectionLabelIdToTextCalculatorOptions.ext] { + label_map_path: "mediapipe/modules/objectron/object_detection_oidv4_labelmap.txt" + } + } +} + +# Filters the detections to only those with valid scores +# for the specified allowed labels. 
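+# For example, with the side packet allowed_labels = "Footwear,Coffee cup",
+# only detections labeled Footwear or Coffee cup survive this node. (The
+# value is illustrative; any comma-separated subset of the label map works.)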
+node { + calculator: "FilterDetectionCalculator" + input_stream: "DETECTIONS:labeled_detections" + output_stream: "DETECTIONS:filtered_detections" + input_side_packet: "LABELS_CSV:allowed_labels" +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "filtered_detections" + output_stream: "detections" + options: { + [mediapipe.NonMaxSuppressionCalculatorOptions.ext] { + min_suppression_threshold: 0.5 + max_num_detections: 100 + overlap_type: INTERSECTION_OVER_UNION + return_empty_detections: true + } + } +} diff --git a/mediapipe/modules/objectron/object_detection_oid_v4_gpu.pbtxt b/mediapipe/modules/objectron/object_detection_oid_v4_gpu.pbtxt new file mode 100644 index 0000000..7873e80 --- /dev/null +++ b/mediapipe/modules/objectron/object_detection_oid_v4_gpu.pbtxt @@ -0,0 +1,136 @@ +# MediaPipe Objectron object detection GPU subgraph. + +type: "ObjectDetectionOidV4Subgraph" + +input_stream: "IMAGE_GPU:input_video" +input_side_packet: "LABELS_CSV:allowed_labels" +output_stream: "DETECTIONS:detections" + +# Crops, resizes, and converts the input video into tensor. +# Preserves aspect ratio of the images. +node { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE_GPU:input_video" + output_stream: "TENSORS:image_tensor" + output_stream: "LETTERBOX_PADDING:letterbox_padding" + options { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 300 + output_tensor_height: 300 + keep_aspect_ratio: false + output_tensor_float_range { + min: -1.0 + max: 1.0 + } + gpu_origin: TOP_LEFT + } + } +} + + +# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:image_tensor" + output_stream: "TENSORS:detection_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + model_path: "mediapipe/modules/objectron/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite" + delegate { gpu {} } + } + } +} + +# Generates a single side packet containing a vector of SSD anchors based on +# the specification in the options. +node { + calculator: "SsdAnchorsCalculator" + output_side_packet: "anchors" + options: { + [mediapipe.SsdAnchorsCalculatorOptions.ext] { + num_layers: 6 + min_scale: 0.2 + max_scale: 0.95 + input_size_height: 300 + input_size_width: 300 + anchor_offset_x: 0.5 + anchor_offset_y: 0.5 + strides: 16 + strides: 32 + strides: 64 + strides: 128 + strides: 256 + strides: 512 + aspect_ratios: 1.0 + aspect_ratios: 2.0 + aspect_ratios: 0.5 + aspect_ratios: 3.0 + aspect_ratios: 0.3333 + reduce_boxes_in_lowest_layer: true + } + } +} + +# Decodes the detection tensors generated by the TensorFlow Lite model, based on +# the SSD anchors and the specification in the options, into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "TensorsToDetectionsCalculator" + input_stream: "TENSORS:detection_tensors" + input_side_packet: "ANCHORS:anchors" + output_stream: "DETECTIONS:all_detections" + options: { + [mediapipe.TensorsToDetectionsCalculatorOptions.ext] { + num_classes: 24 + num_boxes: 1917 + num_coords: 4 + ignore_classes: 0 + sigmoid_score: true + apply_exponential_on_box_size: true + x_scale: 10.0 + y_scale: 10.0 + h_scale: 5.0 + w_scale: 5.0 + min_score_thresh: 0.5 + } + } +} + +# Maps detection label IDs to the corresponding label text. 
The label map is +# provided in the label_map_path option. +node { + calculator: "DetectionLabelIdToTextCalculator" + input_stream: "all_detections" + output_stream: "labeled_detections" + options: { + [mediapipe.DetectionLabelIdToTextCalculatorOptions.ext] { + label_map_path: "object_detection_oidv4_labelmap.txt" + } + } +} + +# Filters the detections to only those with valid scores +# for the specified allowed labels. +node { + calculator: "FilterDetectionCalculator" + input_stream: "DETECTIONS:labeled_detections" + output_stream: "DETECTIONS:filtered_detections" + input_side_packet: "LABELS_CSV:allowed_labels" +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "filtered_detections" + output_stream: "detections" + options: { + [mediapipe.NonMaxSuppressionCalculatorOptions.ext] { + min_suppression_threshold: 0.5 + max_num_detections: 100 + overlap_type: INTERSECTION_OVER_UNION + return_empty_detections: true + } + } +} diff --git a/mediapipe/modules/objectron/object_detection_oidv4_labelmap.txt b/mediapipe/modules/objectron/object_detection_oidv4_labelmap.txt new file mode 100644 index 0000000..ef9032c --- /dev/null +++ b/mediapipe/modules/objectron/object_detection_oidv4_labelmap.txt @@ -0,0 +1,24 @@ +??? +Bicycle +Boot +Laptop +Person +Chair +Cattle +Desk +Cat +Computer mouse +Computer monitor +Box +Mug +Coffee cup +Stationary bicycle +Table +Bottle +High heels +Vehicle +Footwear +Dog +Book +Camera +Car diff --git a/mediapipe/modules/objectron/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite b/mediapipe/modules/objectron/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite new file mode 100644 index 0000000..3cb7291 Binary files /dev/null and b/mediapipe/modules/objectron/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite differ diff --git a/mediapipe/modules/objectron/objectron_cpu.pbtxt b/mediapipe/modules/objectron/objectron_cpu.pbtxt new file mode 100644 index 0000000..884da05 --- /dev/null +++ b/mediapipe/modules/objectron/objectron_cpu.pbtxt @@ -0,0 +1,224 @@ +# MediaPipe Objectron on CPU that produces 3D bounding boxes for objects. +type: "ObjectronCpuSubgraph" + +# Input/Output streams and input side packets. +input_stream: "IMAGE:image" +# Path to TfLite model for 3D bounding box landmark prediction +input_side_packet: "MODEL_PATH:box_landmark_model_path" +# Allowed category labels, e.g. Footwear, Coffee cup, Mug, Chair, Camera +input_side_packet: "LABELS_CSV:allowed_labels" +# Max number of objects to detect/track. (int) +input_side_packet: "MAX_NUM_OBJECTS:max_num_objects" +# Whether landmarks on the previous image should be used to help localize +# landmarks on the current image. (bool) +input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" +# Bounding box landmarks topology definition. +# The numbers are indices in the box_landmarks list. +# +# 3 + + + + + + + + 7 +# +\ +\ UP +# + \ + \ +# + \ + \ | +# + 4 + + + + + + + + 8 | y +# + + + + | +# + + + + | +# + + (0) + + .------- x +# + + + + \ +# 1 + + + + + + + + 5 + \ +# \ + \ + \ z +# \ + \ + \ +# \+ \+ +# 2 + + + + + + + + 6 + +# Collection of detected 3D objects, represented as a FrameAnnotation. +output_stream: "FRAME_ANNOTATION:detected_objects" +# Collection of box landmarks. (NormalizedLandmarkList) +output_stream: "MULTI_LANDMARKS:multi_box_landmarks" +# Crop rectangles derived from bounding box landmarks. +output_stream: "NORM_RECTS:multi_box_rects" + +# Loads the file in the specified path into a blob. 
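+# (As an illustration, box_landmark_model_path could be set to one of the
+# models added in this change, e.g.
+# "mediapipe/modules/objectron/object_detection_3d_chair.tflite".)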
+node { + calculator: "LocalFileContentsCalculator" + input_side_packet: "FILE_PATH:0:box_landmark_model_path" + output_side_packet: "CONTENTS:0:box_landmark_model_blob" +} + +# Converts the input blob into a TF Lite model. +node { + calculator: "TfLiteModelCalculator" + input_side_packet: "MODEL_BLOB:box_landmark_model_blob" + output_side_packet: "MODEL:box_landmark_model" +} + +# When the optional input side packet "use_prev_landmarks" is either absent or +# set to true, uses the landmarks on the previous image to help localize +# landmarks on the current image. +node { + calculator: "GateCalculator" + input_side_packet: "ALLOW:use_prev_landmarks" + input_stream: "prev_box_rects_from_landmarks" + output_stream: "gated_prev_box_rects_from_landmarks" + options: { + [mediapipe.GateCalculatorOptions.ext] { + allow: true + } + } +} + +# Determines if an input vector of NormalizedRect has a size greater than or +# equal to the provided max_num_objects. +node { + calculator: "NormalizedRectVectorHasMinSizeCalculator" + input_stream: "ITERABLE:gated_prev_box_rects_from_landmarks" + input_side_packet: "max_num_objects" + output_stream: "prev_has_enough_objects" +} + +# Drops the incoming image if BoxLandmarkSubgraph was able to identify box +# presence in the previous image. Otherwise, passes the incoming image through +# to trigger a new round of box detection in ObjectDetectionOidV4Subgraph. +node { + calculator: "GateCalculator" + input_stream: "image" + input_stream: "DISALLOW:prev_has_enough_objects" + output_stream: "detection_image" + + options: { + [mediapipe.GateCalculatorOptions.ext] { + empty_packets_as_allow: true + } + } +} + +# Subgraph that performs 2D object detection. +node { + calculator: "ObjectDetectionOidV4Subgraph" + input_stream: "IMAGE:detection_image" + input_side_packet: "LABELS_CSV:allowed_labels" + output_stream: "DETECTIONS:raw_detections" +} + +# Makes sure there are no more detections than provided max_num_objects. +node { + calculator: "ClipDetectionVectorSizeCalculator" + input_stream: "raw_detections" + output_stream: "detections" + input_side_packet: "max_num_objects" + +} + +# Extracts image size from the input images. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_CPU:image" + output_stream: "SIZE:image_size" +} + +# Converts results of box detection into rectangles (normalized by image size) +# that encloses the box. +node { + calculator: "DetectionsToRectsCalculator" + input_stream: "DETECTIONS:detections" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "NORM_RECTS:box_rects_from_detections" + options: { + [mediapipe.DetectionsToRectsCalculatorOptions.ext] { + output_zero_rect_for_empty_detections: false + } + } +} + +# Performs association between NormalizedRect vector elements from previous +# image and rects based on object detections from the current image. This +# calculator ensures that the output box_rects vector doesn't contain +# overlapping regions based on the specified min_similarity_threshold. +node { + calculator: "AssociationNormRectCalculator" + input_stream: "box_rects_from_detections" + input_stream: "gated_prev_box_rects_from_landmarks" + output_stream: "multi_box_rects" + options: { + [mediapipe.AssociationCalculatorOptions.ext] { + min_similarity_threshold: 0.2 + } + } +} + +# Outputs each element of box_rects at a fake timestamp for the rest of the +# graph to process. Clones image and image size packets for each +# single_box_rect at the fake timestamp. 
At the end of the loop, outputs the +# BATCH_END timestamp for downstream calculators to inform them that all +# elements in the vector have been processed. +node { + calculator: "BeginLoopNormalizedRectCalculator" + input_stream: "ITERABLE:multi_box_rects" + input_stream: "CLONE:image" + output_stream: "ITEM:single_box_rect" + output_stream: "CLONE:landmarks_image" + output_stream: "BATCH_END:box_rects_timestamp" +} + +# Subgraph that localizes box landmarks. +node { + calculator: "BoxLandmarkSubgraph" + input_stream: "IMAGE:landmarks_image" + input_side_packet: "MODEL:box_landmark_model" + input_stream: "NORM_RECT:single_box_rect" + output_stream: "NORM_LANDMARKS:single_box_landmarks" +} + +# Collects a set of landmarks for each hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITEM:single_box_landmarks" + input_stream: "BATCH_END:box_rects_timestamp" + output_stream: "ITERABLE:multi_box_landmarks" +} + +# Convert box landmarks to frame annotations. +node { + calculator: "LandmarksToFrameAnnotationCalculator" + input_stream: "MULTI_LANDMARKS:multi_box_landmarks" + output_stream: "FRAME_ANNOTATION:box_annotations" +} + +# Lift the 2D landmarks to 3D using EPnP algorithm. +node { + name: "Lift2DFrameAnnotationTo3DCalculator" + calculator: "Lift2DFrameAnnotationTo3DCalculator" + input_stream: "FRAME_ANNOTATION:box_annotations" + output_stream: "LIFTED_FRAME_ANNOTATION:detected_objects" + options: { + [mediapipe.Lift2DFrameAnnotationTo3DCalculatorOptions.ext] { + normalized_focal_x: 1.0 + normalized_focal_y: 1.0 + } + } +} + +# Get rotated rectangle from detected box. +node { + calculator: "FrameAnnotationToRectCalculator" + input_stream: "FRAME_ANNOTATION:detected_objects" + output_stream: "NORM_RECTS:box_rects_from_landmarks" +} + +# Caches a box rectangle fed back from boxLandmarkSubgraph, and upon the +# arrival of the next input image sends out the cached rectangle with the +# timestamp replaced by that of the input image, essentially generating a packet +# that carries the previous box rectangle. Note that upon the arrival of the +# very first input image, an empty packet is sent out to jump start the +# feedback loop. +node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:image" + input_stream: "LOOP:box_rects_from_landmarks" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:prev_box_rects_from_landmarks" +} diff --git a/mediapipe/modules/objectron/objectron_detection_1stage_gpu.pbtxt b/mediapipe/modules/objectron/objectron_detection_1stage_gpu.pbtxt new file mode 100644 index 0000000..290b120 --- /dev/null +++ b/mediapipe/modules/objectron/objectron_detection_1stage_gpu.pbtxt @@ -0,0 +1,83 @@ +# MediaPipe Objectron detection gpu subgraph + +type: "ObjectronDetectionSubgraphGpu" + +input_stream: "IMAGE_GPU:input_video" +output_stream: "ANNOTATIONS:objects" + +# Transforms the input image on GPU to a 480x640 image. To scale the input +# image, the scale_mode option is set to FIT to preserve the aspect ratio, +# resulting in potential letterboxing in the transformed image. 
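+# (Worked example, for illustration: a 1080x1920 portrait input is scaled by
+# min(480/1080, 640/1920) = 1/3 down to 360x640, and 60 px of letterbox
+# padding is added on the left and right to reach 480x640.)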
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE_GPU:input_video"
+  output_stream: "IMAGE_GPU:transformed_input_video"
+  options: {
+    [mediapipe.ImageTransformationCalculatorOptions.ext] {
+      output_width: 480
+      output_height: 640
+      scale_mode: FIT
+    }
+  }
+}
+
+# Converts the transformed input image on GPU into an image tensor stored as a
+# TfLiteTensor.
+node {
+  calculator: "TfLiteConverterCalculator"
+  input_stream: "IMAGE_GPU:transformed_input_video"
+  output_stream: "TENSORS_GPU:image_tensor"
+}
+
+# Generates a single side packet containing a TensorFlow Lite op resolver that
+# supports custom ops needed by the model used in this graph.
+node {
+  calculator: "TfLiteCustomOpResolverCalculator"
+  output_side_packet: "opresolver"
+  options: {
+    [mediapipe.TfLiteCustomOpResolverCalculatorOptions.ext] {
+      use_gpu: true
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "TfLiteInferenceCalculator"
+  input_stream: "TENSORS_GPU:image_tensor"
+  output_stream: "TENSORS:detection_tensors"
+  input_side_packet: "CUSTOM_OP_RESOLVER:opresolver"
+  options: {
+    [mediapipe.TfLiteInferenceCalculatorOptions.ext] {
+      model_path: "object_detection_3d.tflite"
+    }
+  }
+}
+
+# Decodes the model's output tensor (the heatmap and the distance fields) to
+# 2D keypoints. There are nine 2D keypoints: one center keypoint and eight
+# vertices for the 3D bounding box. The calculator parameters determine the
+# decoder's sensitivity.
+node {
+  calculator: "TfLiteTensorsToObjectsCalculator"
+  input_stream: "TENSORS:detection_tensors"
+  output_stream: "ANNOTATIONS:objects"
+  options: {
+    [mediapipe.TfLiteTensorsToObjectsCalculatorOptions.ext] {
+      num_classes: 1
+      num_keypoints: 9
+      decoder_config {
+        heatmap_threshold: 0.6
+        local_max_distance: 2
+        offset_scale_coef: 1.0
+        voting_radius: 2
+        voting_allowance: 1
+        voting_threshold: 0.2
+      }
+      normalized_focal_x: 2.0975
+      normalized_focal_y: 1.5731
+    }
+  }
+}
diff --git a/mediapipe/modules/objectron/objectron_gpu.pbtxt b/mediapipe/modules/objectron/objectron_gpu.pbtxt
new file mode 100644
index 0000000..7ef2b67
--- /dev/null
+++ b/mediapipe/modules/objectron/objectron_gpu.pbtxt
@@ -0,0 +1,186 @@
+# MediaPipe Objectron on GPU that produces 3D bounding boxes for objects.
+type: "ObjectronGpuSubgraph"
+
+# Input/Output streams and input side packets.
+# Note that the input image is assumed to have aspect ratio 3:4
+# (width:height).
+input_stream: "IMAGE_GPU:image"
+# Allowed category labels, e.g. Footwear, Coffee cup, Mug, Chair, Camera
+input_side_packet: "LABELS_CSV:allowed_labels"
+# Max number of objects to detect/track. (int)
+input_side_packet: "MAX_NUM_OBJECTS:max_num_objects"
+# Whether landmarks on the previous image should be used to help localize
+# landmarks on the current image. (bool)
+input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks"
+
+# Collection of detected 3D objects, represented as a FrameAnnotation.
+output_stream: "FRAME_ANNOTATION:detected_objects"
+
+# When the optional input side packet "use_prev_landmarks" is either absent or
+# set to true, uses the landmarks on the previous image to help localize
+# landmarks on the current image.
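+# (In other words: if the side packet is absent, the gate below falls back to
+# its "allow: true" option and previous-frame landmarks are used; supplying
+# use_prev_landmarks = false forces a fresh detection on every image.)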
+node { + calculator: "GateCalculator" + input_side_packet: "ALLOW:use_prev_landmarks" + input_stream: "prev_box_rects_from_landmarks" + output_stream: "gated_prev_box_rects_from_landmarks" + options: { + [mediapipe.GateCalculatorOptions.ext] { + allow: true + } + } +} + +# Determines if an input vector of NormalizedRect has a size greater than or +# equal to the provided max_num_objects. +node { + calculator: "NormalizedRectVectorHasMinSizeCalculator" + input_stream: "ITERABLE:gated_prev_box_rects_from_landmarks" + input_side_packet: "max_num_objects" + output_stream: "prev_has_enough_objects" +} + +# Drops the incoming image if BoxLandmarkSubgraph was able to identify box +# presence in the previous image. Otherwise, passes the incoming image through +# to trigger a new round of box detection in ObjectDetectionOidV4Subgraph. +node { + calculator: "GateCalculator" + input_stream: "image" + input_stream: "DISALLOW:prev_has_enough_objects" + output_stream: "detection_image" + + options: { + [mediapipe.GateCalculatorOptions.ext] { + empty_packets_as_allow: true + } + } +} + +# Subgraph that performs 2D object detection. +node { + calculator: "ObjectDetectionOidV4Subgraph" + input_stream: "IMAGE_GPU:detection_image" + input_side_packet: "LABELS_CSV:allowed_labels" + output_stream: "DETECTIONS:raw_detections" +} + +# Makes sure there are no more detections than provided max_num_objects. +node { + calculator: "ClipDetectionVectorSizeCalculator" + input_stream: "raw_detections" + output_stream: "detections" + input_side_packet: "max_num_objects" + +} + +# Extracts image size from the input images. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:image" + output_stream: "SIZE:image_size" +} + +# Converts results of box detection into rectangles (normalized by image size) +# that encloses the box. +node { + calculator: "DetectionsToRectsCalculator" + input_stream: "DETECTIONS:detections" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "NORM_RECTS:box_rects_from_detections" + options: { + [mediapipe.DetectionsToRectsCalculatorOptions.ext] { + output_zero_rect_for_empty_detections: false + } + } +} + +# Performs association between NormalizedRect vector elements from previous +# image and rects based on object detections from the current image. This +# calculator ensures that the output box_rects vector doesn't contain +# overlapping regions based on the specified min_similarity_threshold. +node { + calculator: "AssociationNormRectCalculator" + input_stream: "box_rects_from_detections" + input_stream: "gated_prev_box_rects_from_landmarks" + output_stream: "box_rects" + options: { + [mediapipe.AssociationCalculatorOptions.ext] { + min_similarity_threshold: 0.2 + } + } +} + +# Outputs each element of box_rects at a fake timestamp for the rest of the +# graph to process. Clones image and image size packets for each +# single_box_rect at the fake timestamp. At the end of the loop, outputs the +# BATCH_END timestamp for downstream calculators to inform them that all +# elements in the vector have been processed. +node { + calculator: "BeginLoopNormalizedRectCalculator" + input_stream: "ITERABLE:box_rects" + input_stream: "CLONE:image" + output_stream: "ITEM:single_box_rect" + output_stream: "CLONE:landmarks_image" + output_stream: "BATCH_END:box_rects_timestamp" +} + +# Subgraph that localizes box landmarks. 
+node { + calculator: "BoxLandmarkSubgraph" + input_stream: "IMAGE:landmarks_image" + input_stream: "NORM_RECT:single_box_rect" + output_stream: "NORM_LANDMARKS:single_box_landmarks" +} + +# Collects a set of landmarks for each hand into a vector. Upon receiving the +# BATCH_END timestamp, outputs the vector of landmarks at the BATCH_END +# timestamp. +node { + calculator: "EndLoopNormalizedLandmarkListVectorCalculator" + input_stream: "ITEM:single_box_landmarks" + input_stream: "BATCH_END:box_rects_timestamp" + output_stream: "ITERABLE:multi_box_landmarks" +} + +# Convert box landmarks to frame annotations. +node { + calculator: "LandmarksToFrameAnnotationCalculator" + input_stream: "MULTI_LANDMARKS:multi_box_landmarks" + output_stream: "FRAME_ANNOTATION:box_annotations" +} + +# Lift the 2D landmarks to 3D using EPnP algorithm. +node { + calculator: "Lift2DFrameAnnotationTo3DCalculator" + input_stream: "FRAME_ANNOTATION:box_annotations" + output_stream: "LIFTED_FRAME_ANNOTATION:detected_objects" + options: { + [mediapipe.Lift2DFrameAnnotationTo3DCalculatorOptions.ext] { + normalized_focal_x: 2.0975 + normalized_focal_y: 1.5731 + } + } +} + +# Get rotated rectangle from detected box. +node { + calculator: "FrameAnnotationToRectCalculator" + input_stream: "FRAME_ANNOTATION:detected_objects" + output_stream: "NORM_RECTS:box_rects_from_landmarks" +} + +# Caches a box rectangle fed back from boxLandmarkSubgraph, and upon the +# arrival of the next input image sends out the cached rectangle with the +# timestamp replaced by that of the input image, essentially generating a packet +# that carries the previous box rectangle. Note that upon the arrival of the +# very first input image, an empty packet is sent out to jump start the +# feedback loop. +node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:image" + input_stream: "LOOP:box_rects_from_landmarks" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:prev_box_rects_from_landmarks" +} diff --git a/mediapipe/modules/objectron/objectron_tracking_1stage_gpu.pbtxt b/mediapipe/modules/objectron/objectron_tracking_1stage_gpu.pbtxt new file mode 100644 index 0000000..eb19a44 --- /dev/null +++ b/mediapipe/modules/objectron/objectron_tracking_1stage_gpu.pbtxt @@ -0,0 +1,176 @@ +# MediaPipe Objectron tracking gpu subgraph + +type: "ObjectronTrackingSubgraphGpu" + +input_stream: "FRAME_ANNOTATION:objects" +input_stream: "IMAGE_GPU:input_video" +output_stream: "LIFTED_FRAME_ANNOTATION:lifted_tracked_objects" + + +# Converts the detected keypoints to Boxes, used by the tracking subgraph. +node { + calculator: "FrameAnnotationToTimedBoxListCalculator" + input_stream: "FRAME_ANNOTATION:objects" + output_stream: "BOXES:start_pos" +} + +node: { + calculator: "ImageTransformationCalculator" + input_stream: "IMAGE_GPU:input_video" + output_stream: "IMAGE_GPU:downscaled_input_video" + options: { + [mediapipe.ImageTransformationCalculatorOptions.ext] { + output_width: 240 + output_height: 320 + } + } +} + +# Converts GPU buffer to ImageFrame for processing tracking. +node: { + calculator: "GpuBufferToImageFrameCalculator" + input_stream: "downscaled_input_video" + output_stream: "downscaled_input_video_cpu" +} + +# Performs motion analysis on an incoming video stream. 
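+# Note that motion analysis runs on the 240x320 frames produced above; the
+# FrameAnnotationTrackerCalculator further below is configured with the same
+# img_width/img_height.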
+node: { + calculator: "MotionAnalysisCalculator" + input_stream: "VIDEO:downscaled_input_video_cpu" + output_stream: "CAMERA:camera_motion" + output_stream: "FLOW:region_flow" + + options: { + [mediapipe.MotionAnalysisCalculatorOptions.ext]: { + analysis_options { + analysis_policy: ANALYSIS_POLICY_CAMERA_MOBILE + flow_options { + fast_estimation_min_block_size: 100 + top_inlier_sets: 1 + frac_inlier_error_threshold: 3e-3 + downsample_mode: DOWNSAMPLE_TO_INPUT_SIZE + verification_distance: 5.0 + verify_long_feature_acceleration: true + verify_long_feature_trigger_ratio: 0.1 + tracking_options { + max_features: 500 + adaptive_extraction_levels: 2 + min_eig_val_settings { + adaptive_lowest_quality_level: 2e-4 + } + klt_tracker_implementation: KLT_OPENCV + } + } + } + } + } +} + +# Reads optical flow fields defined in +# mediapipe/framework/formats/motion/optical_flow_field.h, +# returns a VideoFrame with 2 channels (v_x and v_y), each channel is quantized +# to 0-255. +node: { + calculator: "FlowPackagerCalculator" + input_stream: "FLOW:region_flow" + input_stream: "CAMERA:camera_motion" + output_stream: "TRACKING:tracking_data" + + options: { + [mediapipe.FlowPackagerCalculatorOptions.ext]: { + flow_packager_options: { + binary_tracking_data_support: false + } + } + } +} + +# Tracks box positions over time. +node: { + calculator: "BoxTrackerCalculator" + input_stream: "TRACKING:tracking_data" + input_stream: "TRACK_TIME:input_video" + input_stream: "START_POS:start_pos" + input_stream: "CANCEL_OBJECT_ID:cancel_object_id" + input_stream_info: { + tag_index: "CANCEL_OBJECT_ID" + back_edge: true + } + output_stream: "BOXES:boxes" + + input_stream_handler { + input_stream_handler: "SyncSetInputStreamHandler" + options { + [mediapipe.SyncSetInputStreamHandlerOptions.ext] { + sync_set { + tag_index: "TRACKING" + tag_index: "TRACK_TIME" + } + sync_set { + tag_index: "START_POS" + } + sync_set { + tag_index: "CANCEL_OBJECT_ID" + } + } + } + } + + options: { + [mediapipe.BoxTrackerCalculatorOptions.ext]: { + tracker_options: { + track_step_options { + track_object_and_camera: true + tracking_degrees: TRACKING_DEGREE_OBJECT_ROTATION_SCALE + inlier_spring_force: 0.0 + static_motion_temporal_ratio: 3e-2 + } + } + visualize_tracking_data: false + streaming_track_data_cache_size: 100 + } + } +} + +# Consolidates tracking and detection results. +node { + calculator: "FrameAnnotationTrackerCalculator" + input_stream: "FRAME_ANNOTATION:objects" + input_stream: "TRACKED_BOXES:boxes" + output_stream: "TRACKED_FRAME_ANNOTATION:tracked_objects" + output_stream: "CANCEL_OBJECT_ID:cancel_object_id" + options: { + [mediapipe.FrameAnnotationTrackerCalculatorOptions.ext] { + img_width: 240 + img_height: 320 + iou_threshold: 0.1 + } + } + + input_stream_handler { + input_stream_handler: "SyncSetInputStreamHandler" + options { + [mediapipe.SyncSetInputStreamHandlerOptions.ext] { + sync_set { + tag_index: "FRAME_ANNOTATION" + } + sync_set { + tag_index: "TRACKED_BOXES" + } + } + } + } +} + +# Lift the tracked 2D keypoints to 3D using EPnP algorithm. 
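+# (The normalized focal lengths below match the ones used by the detection
+# subgraph, so detection and tracking assume the same camera intrinsics.)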
+node { + calculator: "Lift2DFrameAnnotationTo3DCalculator" + input_stream: "FRAME_ANNOTATION:tracked_objects" + output_stream: "LIFTED_FRAME_ANNOTATION:lifted_tracked_objects" + options: { + [mediapipe.Lift2DFrameAnnotationTo3DCalculatorOptions.ext] { + normalized_focal_x: 2.0975 + normalized_focal_y: 1.5731 + } + } +} diff --git a/mediapipe/modules/palm_detection/BUILD b/mediapipe/modules/palm_detection/BUILD new file mode 100644 index 0000000..bed734b --- /dev/null +++ b/mediapipe/modules/palm_detection/BUILD @@ -0,0 +1,71 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +exports_files([ + "palm_detection_lite.tflite", + "palm_detection_full.tflite", +]) + +mediapipe_simple_subgraph( + name = "palm_detection_model_loader", + graph = "palm_detection_model_loader.pbtxt", + register_as = "PalmDetectionModelLoader", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/tflite:tflite_model_calculator", + "//mediapipe/calculators/util:local_file_contents_calculator", + "//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "palm_detection_cpu", + graph = "palm_detection_cpu.pbtxt", + register_as = "PalmDetectionCpu", + deps = [ + ":palm_detection_model_loader", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_detections_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/tflite:tflite_custom_op_resolver_calculator", + "//mediapipe/calculators/util:detection_letterbox_removal_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "palm_detection_gpu", + graph = "palm_detection_gpu.pbtxt", + register_as = "PalmDetectionGpu", + deps = [ + ":palm_detection_model_loader", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_detections_calculator", + "//mediapipe/calculators/tflite:ssd_anchors_calculator", + "//mediapipe/calculators/tflite:tflite_custom_op_resolver_calculator", + "//mediapipe/calculators/util:detection_letterbox_removal_calculator", + "//mediapipe/calculators/util:non_max_suppression_calculator", + ], +) diff --git a/mediapipe/modules/palm_detection/README.md b/mediapipe/modules/palm_detection/README.md new file mode 100644 index 0000000..c7fd610 --- /dev/null +++ b/mediapipe/modules/palm_detection/README.md @@ -0,0 +1,7 @@ +# palm_detection + +Subgraphs|Details +:--- | :--- +[`PalmDetectionCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_cpu.pbtxt)| Detects palms/hands. (CPU input.) 
+[`PalmDetectionGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_gpu.pbtxt)| Detects palms/hands. (GPU input.)
+
diff --git a/mediapipe/modules/palm_detection/palm_detection_cpu.pbtxt b/mediapipe/modules/palm_detection/palm_detection_cpu.pbtxt
new file mode 100644
index 0000000..32b3927
--- /dev/null
+++ b/mediapipe/modules/palm_detection/palm_detection_cpu.pbtxt
@@ -0,0 +1,147 @@
+# MediaPipe graph to detect palms with TensorFlow Lite on CPU.
+
+type: "PalmDetectionCpu"
+
+# CPU image. (ImageFrame)
+input_stream: "IMAGE:image"
+
+# Complexity of the palm detection model: 0 or 1. Accuracy as well as inference
+# latency generally go up with the model complexity. If unspecified, functions
+# as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# Detected palms. (std::vector<Detection>)
+# NOTE: there will not be an output packet in the DETECTIONS stream for this
+# particular timestamp if no palms are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "DETECTIONS:detections"
+
+# Transforms an image into a 192x192 tensor while keeping the aspect ratio, and
+# therefore may result in potential letterboxing.
+node {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE:image"
+  output_stream: "TENSORS:input_tensor"
+  output_stream: "LETTERBOX_PADDING:letterbox_padding"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 192
+      output_tensor_height: 192
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: 0.0
+        max: 1.0
+      }
+      border_mode: BORDER_ZERO
+    }
+  }
+}
+
+# Generates a single side packet containing a TensorFlow Lite op resolver that
+# supports custom ops needed by the model used in this graph.
+node {
+  calculator: "TfLiteCustomOpResolverCalculator"
+  output_side_packet: "opresolver"
+}
+
+# Loads the palm detection TF Lite model.
+node {
+  calculator: "PalmDetectionModelLoader"
+  input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+  output_side_packet: "MODEL:model"
+}
+
+# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "InferenceCalculator"
+  input_stream: "TENSORS:input_tensor"
+  output_stream: "TENSORS:detection_tensors"
+  input_side_packet: "CUSTOM_OP_RESOLVER:opresolver"
+  input_side_packet: "MODEL:model"
+  options: {
+    [mediapipe.InferenceCalculatorOptions.ext] {
+      delegate { xnnpack {} }
+    }
+  }
+}
+
+# Generates a single side packet containing a vector of SSD anchors based on
+# the specification in the options.
+node {
+  calculator: "SsdAnchorsCalculator"
+  output_side_packet: "anchors"
+  options: {
+    [mediapipe.SsdAnchorsCalculatorOptions.ext] {
+      num_layers: 4
+      min_scale: 0.1484375
+      max_scale: 0.75
+      input_size_width: 192
+      input_size_height: 192
+      anchor_offset_x: 0.5
+      anchor_offset_y: 0.5
+      strides: 8
+      strides: 16
+      strides: 16
+      strides: 16
+      aspect_ratios: 1.0
+      fixed_anchor_size: true
+    }
+  }
+}
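A quick cross-check of the anchor specification above: with `aspect_ratios: 1.0` and `fixed_anchor_size`, this four-layer spec works out to two anchors per feature-map cell, which lands exactly on the `num_boxes: 2016` that the tensor decoding below expects. A minimal Rust sketch of that arithmetic (illustrative only, not the actual SsdAnchorsCalculator logic):

    fn main() {
        let input = 192u32;               // input_size_width / input_size_height
        let strides = [8u32, 16, 16, 16]; // one feature-map layer per stride entry
        let anchors_per_cell = 2;         // base scale plus one interpolated scale
        let total: u32 = strides
            .iter()
            .map(|s| (input / s) * (input / s) * anchors_per_cell)
            .sum();
        println!("total SSD anchors: {}", total); // 24*24*2 + 3*(12*12*2) = 2016
    }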
+
+# Decodes the detection tensors generated by the TensorFlow Lite model, based on
+# the SSD anchors and the specification in the options, into a vector of
+# detections. Each detection describes a detected object.
+node {
+  calculator: "TensorsToDetectionsCalculator"
+  input_stream: "TENSORS:detection_tensors"
+  input_side_packet: "ANCHORS:anchors"
+  output_stream: "DETECTIONS:unfiltered_detections"
+  options: {
+    [mediapipe.TensorsToDetectionsCalculatorOptions.ext] {
+      num_classes: 1
+      num_boxes: 2016
+      num_coords: 18
+      box_coord_offset: 0
+      keypoint_coord_offset: 4
+      num_keypoints: 7
+      num_values_per_keypoint: 2
+      sigmoid_score: true
+      score_clipping_thresh: 100.0
+      reverse_output_order: true
+
+      x_scale: 192.0
+      y_scale: 192.0
+      w_scale: 192.0
+      h_scale: 192.0
+      min_score_thresh: 0.5
+    }
+  }
+}
+
+# Performs non-max suppression to remove excessive detections.
+node {
+  calculator: "NonMaxSuppressionCalculator"
+  input_stream: "unfiltered_detections"
+  output_stream: "filtered_detections"
+  options: {
+    [mediapipe.NonMaxSuppressionCalculatorOptions.ext] {
+      min_suppression_threshold: 0.3
+      overlap_type: INTERSECTION_OVER_UNION
+      algorithm: WEIGHTED
+    }
+  }
+}
+
+# Adjusts detection locations (already normalized to [0.f, 1.f]) on the
+# letterboxed image (after image transformation with the FIT scale mode) to the
+# corresponding locations on the same image with the letterbox removed (the
+# input image to the graph before image transformation).
+node {
+  calculator: "DetectionLetterboxRemovalCalculator"
+  input_stream: "DETECTIONS:filtered_detections"
+  input_stream: "LETTERBOX_PADDING:letterbox_padding"
+  output_stream: "DETECTIONS:detections"
+}
diff --git a/mediapipe/modules/palm_detection/palm_detection_full.tflite b/mediapipe/modules/palm_detection/palm_detection_full.tflite
new file mode 100755
index 0000000..aee76a9
Binary files /dev/null and b/mediapipe/modules/palm_detection/palm_detection_full.tflite differ
diff --git a/mediapipe/modules/palm_detection/palm_detection_gpu.pbtxt b/mediapipe/modules/palm_detection/palm_detection_gpu.pbtxt
new file mode 100644
index 0000000..73e4127
--- /dev/null
+++ b/mediapipe/modules/palm_detection/palm_detection_gpu.pbtxt
@@ -0,0 +1,153 @@
+# MediaPipe graph to detect palms with TensorFlow Lite on GPU.
+
+type: "PalmDetectionGpu"
+
+# GPU image. (GpuBuffer)
+input_stream: "IMAGE:image"
+
+# Complexity of the palm detection model: 0 or 1. Accuracy as well as inference
+# latency generally go up with the model complexity. If unspecified, functions
+# as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# Detected palms. (std::vector<Detection>)
+# NOTE: there will not be an output packet in the DETECTIONS stream for this
+# particular timestamp if no palms are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "DETECTIONS:detections"
+
+# Transforms an image into a 192x192 tensor while keeping the aspect ratio, and
+# therefore may result in potential letterboxing.
+node {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE_GPU:image"
+  output_stream: "TENSORS:input_tensor"
+  output_stream: "LETTERBOX_PADDING:letterbox_padding"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 192
+      output_tensor_height: 192
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: 0.0
+        max: 1.0
+      }
+      border_mode: BORDER_ZERO
+      gpu_origin: TOP_LEFT
+    }
+  }
+}
+
+# Generates a single side packet containing a TensorFlow Lite op resolver that
+# supports custom ops needed by the model used in this graph.
+node { + calculator: "TfLiteCustomOpResolverCalculator" + output_side_packet: "opresolver" + options: { + [mediapipe.TfLiteCustomOpResolverCalculatorOptions.ext] { + use_gpu: true + } + } +} + +# Loads the palm detection TF Lite model. +node { + calculator: "PalmDetectionModelLoader" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + output_side_packet: "MODEL:model" +} + +# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a +# vector of tensors representing, for instance, detection boxes/keypoints and +# scores. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:input_tensor" + output_stream: "TENSORS:detection_tensors" + input_side_packet: "CUSTOM_OP_RESOLVER:opresolver" + input_side_packet: "MODEL:model" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + delegate { gpu {} } + } + } +} + +# Generates a single side packet containing a vector of SSD anchors based on +# the specification in the options. +node { + calculator: "SsdAnchorsCalculator" + output_side_packet: "anchors" + options: { + [mediapipe.SsdAnchorsCalculatorOptions.ext] { + num_layers: 4 + min_scale: 0.1484375 + max_scale: 0.75 + input_size_width: 192 + input_size_height: 192 + anchor_offset_x: 0.5 + anchor_offset_y: 0.5 + strides: 8 + strides: 16 + strides: 16 + strides: 16 + aspect_ratios: 1.0 + fixed_anchor_size: true + } + } +} + +# Decodes the detection tensors generated by the TensorFlow Lite model, based on +# the SSD anchors and the specification in the options, into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "TensorsToDetectionsCalculator" + input_stream: "TENSORS:detection_tensors" + input_side_packet: "ANCHORS:anchors" + output_stream: "DETECTIONS:unfiltered_detections" + options: { + [mediapipe.TensorsToDetectionsCalculatorOptions.ext] { + num_classes: 1 + num_boxes: 2016 + num_coords: 18 + box_coord_offset: 0 + keypoint_coord_offset: 4 + num_keypoints: 7 + num_values_per_keypoint: 2 + sigmoid_score: true + score_clipping_thresh: 100.0 + reverse_output_order: true + + x_scale: 192.0 + y_scale: 192.0 + w_scale: 192.0 + h_scale: 192.0 + min_score_thresh: 0.5 + } + } +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "unfiltered_detections" + output_stream: "filtered_detections" + options: { + [mediapipe.NonMaxSuppressionCalculatorOptions.ext] { + min_suppression_threshold: 0.3 + overlap_type: INTERSECTION_OVER_UNION + algorithm: WEIGHTED + } + } +} + +# Adjusts detection locations (already normalized to [0.f, 1.f]) on the +# letterboxed image (after image transformation with the FIT scale mode) to the +# corresponding locations on the same image with the letterbox removed (the +# input image to the graph before image transformation). 
+node {
+  calculator: "DetectionLetterboxRemovalCalculator"
+  input_stream: "DETECTIONS:filtered_detections"
+  input_stream: "LETTERBOX_PADDING:letterbox_padding"
+  output_stream: "DETECTIONS:detections"
+}
diff --git a/mediapipe/modules/palm_detection/palm_detection_lite.tflite b/mediapipe/modules/palm_detection/palm_detection_lite.tflite
new file mode 100755
index 0000000..a19339a
Binary files /dev/null and b/mediapipe/modules/palm_detection/palm_detection_lite.tflite differ
diff --git a/mediapipe/modules/palm_detection/palm_detection_model_loader.pbtxt b/mediapipe/modules/palm_detection/palm_detection_model_loader.pbtxt
new file mode 100644
index 0000000..f33a76e
--- /dev/null
+++ b/mediapipe/modules/palm_detection/palm_detection_model_loader.pbtxt
@@ -0,0 +1,63 @@
+# MediaPipe graph to load a selected palm detection TF Lite model.
+
+type: "PalmDetectionModelLoader"
+
+# Complexity of the palm detection model: 0 or 1. Accuracy as well as inference
+# latency generally go up with the model complexity. If unspecified, functions
+# as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# TF Lite model represented as a FlatBuffer.
+# (std::unique_ptr<tflite::FlatBufferModel, std::function<void(tflite::FlatBufferModel*)>>)
+output_side_packet: "MODEL:model"
+
+# Determines the path to the desired palm detection model file.
+node {
+  calculator: "SwitchContainer"
+  input_side_packet: "SELECT:model_complexity"
+  output_side_packet: "PACKET:model_path"
+  options: {
+    [mediapipe.SwitchContainerOptions.ext] {
+      select: 1
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/palm_detection/palm_detection_lite.tflite"
+            }
+          }
+        }
+      }
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/palm_detection/palm_detection_full.tflite"
+            }
+          }
+        }
+      }
+    }
+  }
+}
+
+# Loads the file in the specified path into a blob.
+node {
+  calculator: "LocalFileContentsCalculator"
+  input_side_packet: "FILE_PATH:model_path"
+  output_side_packet: "CONTENTS:model_blob"
+  options: {
+    [mediapipe.LocalFileContentsCalculatorOptions.ext]: {
+      text_mode: false
+    }
+  }
+}
+
+# Converts the input blob into a TF Lite model.
+node {
+  calculator: "TfLiteModelCalculator"
+  input_side_packet: "MODEL_BLOB:model_blob"
+  output_side_packet: "MODEL:model"
+}
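The SwitchContainer above boils down to a model-path lookup keyed on MODEL_COMPLEXITY. A sketch of the equivalent selection logic in Rust, assuming the same default of 1 (the function name is illustrative, not part of this crate):

    fn palm_detection_model_path(model_complexity: Option<i32>) -> &'static str {
        // select: 1 in SwitchContainerOptions makes the full model the default.
        match model_complexity.unwrap_or(1) {
            0 => "mediapipe/modules/palm_detection/palm_detection_lite.tflite",
            _ => "mediapipe/modules/palm_detection/palm_detection_full.tflite",
        }
    }

    fn main() {
        assert!(palm_detection_model_path(None).ends_with("palm_detection_full.tflite"));
        assert!(palm_detection_model_path(Some(0)).ends_with("palm_detection_lite.tflite"));
    }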
diff --git a/mediapipe/modules/pose_detection/BUILD b/mediapipe/modules/pose_detection/BUILD
new file mode 100644
index 0000000..f460300
--- /dev/null
+++ b/mediapipe/modules/pose_detection/BUILD
@@ -0,0 +1,56 @@
+# Copyright 2019 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(
+    "//mediapipe/framework/tool:mediapipe_graph.bzl",
+    "mediapipe_simple_subgraph",
+)
+
+licenses(["notice"])
+
+package(default_visibility = ["//visibility:public"])
+
+mediapipe_simple_subgraph(
+    name = "pose_detection_cpu",
+    graph = "pose_detection_cpu.pbtxt",
+    register_as = "PoseDetectionCpu",
+    deps = [
+        "//mediapipe/calculators/tensor:image_to_tensor_calculator",
+        "//mediapipe/calculators/tensor:inference_calculator",
+        "//mediapipe/calculators/tensor:tensors_to_detections_calculator",
+        "//mediapipe/calculators/tflite:ssd_anchors_calculator",
+        "//mediapipe/calculators/util:detection_letterbox_removal_calculator",
+        "//mediapipe/calculators/util:non_max_suppression_calculator",
+    ],
+)
+
+mediapipe_simple_subgraph(
+    name = "pose_detection_gpu",
+    graph = "pose_detection_gpu.pbtxt",
+    register_as = "PoseDetectionGpu",
+    deps = [
+        "//mediapipe/calculators/tensor:image_to_tensor_calculator",
+        "//mediapipe/calculators/tensor:inference_calculator",
+        "//mediapipe/calculators/tensor:tensors_to_detections_calculator",
+        "//mediapipe/calculators/tflite:ssd_anchors_calculator",
+        "//mediapipe/calculators/util:detection_letterbox_removal_calculator",
+        "//mediapipe/calculators/util:non_max_suppression_calculator",
+    ],
+)
+
+exports_files(
+    srcs = [
+        "pose_detection.tflite",
+    ],
+)
diff --git a/mediapipe/modules/pose_detection/README.md b/mediapipe/modules/pose_detection/README.md
new file mode 100644
index 0000000..e2e3b2f
--- /dev/null
+++ b/mediapipe/modules/pose_detection/README.md
@@ -0,0 +1,7 @@
+# pose_detection
+
+Subgraphs|Details
+:--- | :---
+[`PoseDetectionCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_detection/pose_detection_cpu.pbtxt)| Detects poses. (CPU input, and inference is executed on CPU.)
+[`PoseDetectionGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_detection/pose_detection_gpu.pbtxt)| Detects poses. (GPU input, and inference is executed on GPU.)
+
diff --git a/mediapipe/modules/pose_detection/pose_detection.tflite b/mediapipe/modules/pose_detection/pose_detection.tflite
new file mode 100755
index 0000000..4f1c521
Binary files /dev/null and b/mediapipe/modules/pose_detection/pose_detection.tflite differ
diff --git a/mediapipe/modules/pose_detection/pose_detection_cpu.pbtxt b/mediapipe/modules/pose_detection/pose_detection_cpu.pbtxt
new file mode 100644
index 0000000..79ee1ac
--- /dev/null
+++ b/mediapipe/modules/pose_detection/pose_detection_cpu.pbtxt
@@ -0,0 +1,159 @@
+# MediaPipe graph to detect poses. (CPU input, and inference is executed on
+# CPU.)
+#
+# It is required that "pose_detection.tflite" is available at
+# "mediapipe/modules/pose_detection/pose_detection.tflite"
+# path during execution.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "PoseDetectionCpu"
+#     input_stream: "IMAGE:image"
+#     output_stream: "DETECTIONS:pose_detections"
+#   }
+
+type: "PoseDetectionCpu"
+
+# CPU image. (ImageFrame)
+input_stream: "IMAGE:image"
+
+# Detected poses. (std::vector<Detection>)
+# Bounding box in each pose detection is currently set to the bounding box of
+# the detected face. However, 4 additional key points are available in each
+# detection, which are used to further calculate a (rotated) bounding box that
+# encloses the body region of interest. Among the 4 key points, the first two
+# are for identifying the full-body region, and the second two for upper body
+# only:
+#
+# Key point 0 - mid hip center
+# Key point 1 - point that encodes size & rotation (for full body)
+# Key point 2 - mid shoulder center
+# Key point 3 - point that encodes size & rotation (for upper body)
+#
+# NOTE: there will not be an output packet in the DETECTIONS stream for this
+# particular timestamp if no poses are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "DETECTIONS:detections"
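To make the role of the two full-body key points concrete: downstream, AlignmentPointsRectsCalculator derives the ROI rotation from key points 0 and 1 against a 90-degree target. A hedged Rust sketch of that angle computation (illustrative; MediaPipe additionally normalizes the resulting angle):

    fn roi_rotation(hip_center: (f32, f32), full_body_point: (f32, f32)) -> f32 {
        let target = 90.0_f32.to_radians(); // rotation_vector_target_angle_degrees: 90
        let dx = full_body_point.0 - hip_center.0;
        let dy = full_body_point.1 - hip_center.1;
        // Image y grows downward, hence -dy for a conventional angle.
        target - (-dy).atan2(dx)
    }

    fn main() {
        // Key point 1 directly above key point 0 (person standing upright):
        // the derived ROI needs no rotation.
        println!("rotation: {:.3} rad", roi_rotation((0.5, 0.6), (0.5, 0.3)));
    }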
+
+# Transforms the input image into a 224x224 one while keeping the aspect ratio
+# (what is expected by the corresponding model), resulting in potential
+# letterboxing in the transformed image.
+node: {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE:image"
+  output_stream: "TENSORS:input_tensors"
+  output_stream: "LETTERBOX_PADDING:letterbox_padding"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 224
+      output_tensor_height: 224
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: -1.0
+        max: 1.0
+      }
+      border_mode: BORDER_ZERO
+      # If this calculator truly operates on the CPU, then gpu_origin is
+      # ignored, but if some build switch insists on GPU inference, then we
+      # will still need to set this.
+      gpu_origin: TOP_LEFT
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on CPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node {
+  calculator: "InferenceCalculator"
+  input_stream: "TENSORS:input_tensors"
+  output_stream: "TENSORS:detection_tensors"
+  options: {
+    [mediapipe.InferenceCalculatorOptions.ext] {
+      model_path: "mediapipe/modules/pose_detection/pose_detection.tflite"
+      delegate {
+        xnnpack {}
+      }
+    }
+  }
+}
+
+# Generates a single side packet containing a vector of SSD anchors based on
+# the specification in the options.
+node {
+  calculator: "SsdAnchorsCalculator"
+  output_side_packet: "anchors"
+  options: {
+    [mediapipe.SsdAnchorsCalculatorOptions.ext] {
+      num_layers: 5
+      min_scale: 0.1484375
+      max_scale: 0.75
+      input_size_height: 224
+      input_size_width: 224
+      anchor_offset_x: 0.5
+      anchor_offset_y: 0.5
+      strides: 8
+      strides: 16
+      strides: 32
+      strides: 32
+      strides: 32
+      aspect_ratios: 1.0
+      fixed_anchor_size: true
+    }
+  }
+}
+
+# Decodes the detection tensors generated by the TensorFlow Lite model, based on
+# the SSD anchors and the specification in the options, into a vector of
+# detections. Each detection describes a detected object.
+node {
+  calculator: "TensorsToDetectionsCalculator"
+  input_stream: "TENSORS:detection_tensors"
+  input_side_packet: "ANCHORS:anchors"
+  output_stream: "DETECTIONS:unfiltered_detections"
+  options: {
+    [mediapipe.TensorsToDetectionsCalculatorOptions.ext] {
+      num_classes: 1
+      num_boxes: 2254
+      num_coords: 12
+      box_coord_offset: 0
+      keypoint_coord_offset: 4
+      num_keypoints: 4
+      num_values_per_keypoint: 2
+      sigmoid_score: true
+      score_clipping_thresh: 100.0
+      reverse_output_order: true
+      x_scale: 224.0
+      y_scale: 224.0
+      h_scale: 224.0
+      w_scale: 224.0
+      min_score_thresh: 0.5
+    }
+  }
+}
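The sigmoid_score / score_clipping_thresh pair above is easy to misread: the raw logit is clamped before the sigmoid for numerical stability, and only scores at or above min_score_thresh survive. A minimal sketch of that decoding step (not the calculator's actual code):

    fn detection_score(raw_logit: f32) -> f32 {
        let clipped = raw_logit.clamp(-100.0, 100.0); // score_clipping_thresh: 100.0
        1.0 / (1.0 + (-clipped).exp())                // sigmoid_score: true
    }

    fn main() {
        let raw = vec![-2.0_f32, 0.3, 4.1];
        let kept: Vec<f32> = raw
            .into_iter()
            .map(detection_score)
            .filter(|&s| s >= 0.5) // min_score_thresh: 0.5
            .collect();
        println!("{:?}", kept); // keeps the scores for the 0.3 and 4.1 logits
    }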
+
+# Performs non-max suppression to remove excessive detections.
+node {
+  calculator: "NonMaxSuppressionCalculator"
+  input_stream: "unfiltered_detections"
+  output_stream: "filtered_detections"
+  options: {
+    [mediapipe.NonMaxSuppressionCalculatorOptions.ext] {
+      min_suppression_threshold: 0.3
+      overlap_type: INTERSECTION_OVER_UNION
+      algorithm: WEIGHTED
+    }
+  }
+}
+
+# Adjusts detection locations (already normalized to [0.f, 1.f]) on the
+# letterboxed image (after image transformation with the FIT scale mode) to the
+# corresponding locations on the same image with the letterbox removed (the
+# input image to the graph before image transformation).
+node {
+  calculator: "DetectionLetterboxRemovalCalculator"
+  input_stream: "DETECTIONS:filtered_detections"
+  input_stream: "LETTERBOX_PADDING:letterbox_padding"
+  output_stream: "DETECTIONS:detections"
+}
diff --git a/mediapipe/modules/pose_detection/pose_detection_gpu.pbtxt b/mediapipe/modules/pose_detection/pose_detection_gpu.pbtxt
new file mode 100644
index 0000000..b95a117
--- /dev/null
+++ b/mediapipe/modules/pose_detection/pose_detection_gpu.pbtxt
@@ -0,0 +1,155 @@
+# MediaPipe graph to detect poses. (GPU input, and inference is executed on
+# GPU.)
+#
+# It is required that "pose_detection.tflite" is available at
+# "mediapipe/modules/pose_detection/pose_detection.tflite"
+# path during execution.
+#
+# EXAMPLE:
+#   node {
+#     calculator: "PoseDetectionGpu"
+#     input_stream: "IMAGE:image"
+#     output_stream: "DETECTIONS:pose_detections"
+#   }
+
+type: "PoseDetectionGpu"
+
+# GPU image. (GpuBuffer)
+input_stream: "IMAGE:image"
+
+# Detected poses. (std::vector<Detection>)
+# Bounding box in each pose detection is currently set to the bounding box of
+# the detected face. However, 4 additional key points are available in each
+# detection, which are used to further calculate a (rotated) bounding box that
+# encloses the body region of interest. Among the 4 key points, the first two
+# are for identifying the full-body region, and the second two for upper body
+# only:
+#
+# Key point 0 - mid hip center
+# Key point 1 - point that encodes size & rotation (for full body)
+# Key point 2 - mid shoulder center
+# Key point 3 - point that encodes size & rotation (for upper body)
+#
+# NOTE: there will not be an output packet in the DETECTIONS stream for this
+# particular timestamp if no poses are detected. However, the MediaPipe
+# framework will internally inform the downstream calculators of the absence of
+# this packet so that they don't wait for it unnecessarily.
+output_stream: "DETECTIONS:detections"
+
+# Transforms the input image into a 224x224 one while keeping the aspect ratio
+# (what is expected by the corresponding model), resulting in potential
+# letterboxing in the transformed image.
+node: {
+  calculator: "ImageToTensorCalculator"
+  input_stream: "IMAGE_GPU:image"
+  output_stream: "TENSORS:input_tensors"
+  output_stream: "LETTERBOX_PADDING:letterbox_padding"
+  options: {
+    [mediapipe.ImageToTensorCalculatorOptions.ext] {
+      output_tensor_width: 224
+      output_tensor_height: 224
+      keep_aspect_ratio: true
+      output_tensor_float_range {
+        min: -1.0
+        max: 1.0
+      }
+      border_mode: BORDER_ZERO
+      gpu_origin: TOP_LEFT
+    }
+  }
+}
+
+# Runs a TensorFlow Lite model on GPU that takes an image tensor and outputs a
+# vector of tensors representing, for instance, detection boxes/keypoints and
+# scores.
+node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:input_tensors" + output_stream: "TENSORS:detection_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + model_path: "mediapipe/modules/pose_detection/pose_detection.tflite" + # + delegate: { gpu { use_advanced_gpu_api: true } } + } + } +} + +# Generates a single side packet containing a vector of SSD anchors based on +# the specification in the options. +node { + calculator: "SsdAnchorsCalculator" + output_side_packet: "anchors" + options: { + [mediapipe.SsdAnchorsCalculatorOptions.ext] { + num_layers: 5 + min_scale: 0.1484375 + max_scale: 0.75 + input_size_height: 224 + input_size_width: 224 + anchor_offset_x: 0.5 + anchor_offset_y: 0.5 + strides: 8 + strides: 16 + strides: 32 + strides: 32 + strides: 32 + aspect_ratios: 1.0 + fixed_anchor_size: true + } + } +} + +# Decodes the detection tensors generated by the TensorFlow Lite model, based on +# the SSD anchors and the specification in the options, into a vector of +# detections. Each detection describes a detected object. +node { + calculator: "TensorsToDetectionsCalculator" + input_stream: "TENSORS:detection_tensors" + input_side_packet: "ANCHORS:anchors" + output_stream: "DETECTIONS:unfiltered_detections" + options: { + [mediapipe.TensorsToDetectionsCalculatorOptions.ext] { + num_classes: 1 + num_boxes: 2254 + num_coords: 12 + box_coord_offset: 0 + keypoint_coord_offset: 4 + num_keypoints: 4 + num_values_per_keypoint: 2 + sigmoid_score: true + score_clipping_thresh: 100.0 + reverse_output_order: true + x_scale: 224.0 + y_scale: 224.0 + h_scale: 224.0 + w_scale: 224.0 + min_score_thresh: 0.5 + } + } +} + +# Performs non-max suppression to remove excessive detections. +node { + calculator: "NonMaxSuppressionCalculator" + input_stream: "unfiltered_detections" + output_stream: "filtered_detections" + options: { + [mediapipe.NonMaxSuppressionCalculatorOptions.ext] { + min_suppression_threshold: 0.3 + overlap_type: INTERSECTION_OVER_UNION + algorithm: WEIGHTED + } + } +} + +# Adjusts detection locations (already normalized to [0.f, 1.f]) on the +# letterboxed image (after image transformation with the FIT scale mode) to the +# corresponding locations on the same image with the letterbox removed (the +# input image to the graph before image transformation). +node { + calculator: "DetectionLetterboxRemovalCalculator" + input_stream: "DETECTIONS:filtered_detections" + input_stream: "LETTERBOX_PADDING:letterbox_padding" + output_stream: "DETECTIONS:detections" +} diff --git a/mediapipe/modules/pose_landmark/BUILD b/mediapipe/modules/pose_landmark/BUILD new file mode 100644 index 0000000..787f0e2 --- /dev/null +++ b/mediapipe/modules/pose_landmark/BUILD @@ -0,0 +1,189 @@ +# Copyright 2020 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
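All of the detection graphs in this patch share the same suppression settings (overlap_type: INTERSECTION_OVER_UNION, min_suppression_threshold: 0.3, algorithm: WEIGHTED, which merges overlapping candidates by score-weighted averaging instead of discarding them outright). A small Rust sketch of the IoU test at the core of that step, using illustrative types:

    #[derive(Clone, Copy)]
    struct Box2D { xmin: f32, ymin: f32, xmax: f32, ymax: f32 }

    fn iou(a: Box2D, b: Box2D) -> f32 {
        let iw = (a.xmax.min(b.xmax) - a.xmin.max(b.xmin)).max(0.0);
        let ih = (a.ymax.min(b.ymax) - a.ymin.max(b.ymin)).max(0.0);
        let inter = iw * ih;
        let area = |r: Box2D| (r.xmax - r.xmin) * (r.ymax - r.ymin);
        inter / (area(a) + area(b) - inter)
    }

    fn main() {
        let a = Box2D { xmin: 0.10, ymin: 0.10, xmax: 0.50, ymax: 0.50 };
        let b = Box2D { xmin: 0.15, ymin: 0.15, xmax: 0.55, ymax: 0.55 };
        // Two candidates for the same object: IoU ~= 0.62 > 0.3, so they merge.
        println!("IoU = {:.2}", iou(a, b));
    }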
+ +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "pose_landmark_model_loader", + graph = "pose_landmark_model_loader.pbtxt", + register_as = "PoseLandmarkModelLoader", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/tflite:tflite_model_calculator", + "//mediapipe/calculators/util:local_file_contents_calculator", + "//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "pose_landmark_by_roi_gpu", + graph = "pose_landmark_by_roi_gpu.pbtxt", + register_as = "PoseLandmarkByRoiGpu", + deps = [ + ":pose_landmark_model_loader", + ":pose_landmarks_and_segmentation_inverse_projection", + ":tensors_to_pose_landmarks_and_segmentation", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "pose_landmark_by_roi_cpu", + graph = "pose_landmark_by_roi_cpu.pbtxt", + register_as = "PoseLandmarkByRoiCpu", + deps = [ + ":pose_landmark_model_loader", + ":pose_landmarks_and_segmentation_inverse_projection", + ":tensors_to_pose_landmarks_and_segmentation", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "tensors_to_pose_landmarks_and_segmentation", + graph = "tensors_to_pose_landmarks_and_segmentation.pbtxt", + register_as = "TensorsToPoseLandmarksAndSegmentation", + deps = [ + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:split_landmarks_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/tensor:tensors_to_floats_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/tensor:tensors_to_segmentation_calculator", + "//mediapipe/calculators/util:refine_landmarks_from_heatmap_calculator", + "//mediapipe/calculators/util:thresholding_calculator", + "//mediapipe/calculators/util:visibility_copy_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "pose_landmarks_and_segmentation_inverse_projection", + graph = "pose_landmarks_and_segmentation_inverse_projection.pbtxt", + register_as = "PoseLandmarksAndSegmentationInverseProjection", + deps = [ + "//mediapipe/calculators/image:warp_affine_calculator", + "//mediapipe/calculators/util:inverse_matrix_calculator", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:world_landmark_projection_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "pose_landmark_filtering", + graph = "pose_landmark_filtering.pbtxt", + register_as = "PoseLandmarkFiltering", + deps = [ + "//mediapipe/calculators/util:alignment_points_to_rects_calculator", + "//mediapipe/calculators/util:landmarks_smoothing_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:visibility_smoothing_calculator", + "//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "pose_segmentation_filtering", + graph = "pose_segmentation_filtering.pbtxt", + 
register_as = "PoseSegmentationFiltering", + deps = [ + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/image:segmentation_smoothing_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "pose_landmark_gpu", + graph = "pose_landmark_gpu.pbtxt", + register_as = "PoseLandmarkGpu", + deps = [ + ":pose_detection_to_roi", + ":pose_landmark_by_roi_gpu", + ":pose_landmark_filtering", + ":pose_landmarks_to_roi", + ":pose_segmentation_filtering", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:merge_calculator", + "//mediapipe/calculators/core:packet_presence_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/util:from_image_calculator", + "//mediapipe/modules/pose_detection:pose_detection_gpu", + ], +) + +mediapipe_simple_subgraph( + name = "pose_landmark_cpu", + graph = "pose_landmark_cpu.pbtxt", + register_as = "PoseLandmarkCpu", + deps = [ + ":pose_detection_to_roi", + ":pose_landmark_by_roi_cpu", + ":pose_landmark_filtering", + ":pose_landmarks_to_roi", + ":pose_segmentation_filtering", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:gate_calculator", + "//mediapipe/calculators/core:merge_calculator", + "//mediapipe/calculators/core:packet_presence_calculator", + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/util:from_image_calculator", + "//mediapipe/modules/pose_detection:pose_detection_cpu", + ], +) + +exports_files( + srcs = [ + "pose_landmark_full.tflite", + "pose_landmark_heavy.tflite", + "pose_landmark_lite.tflite", + ], +) + +mediapipe_simple_subgraph( + name = "pose_detection_to_roi", + graph = "pose_detection_to_roi.pbtxt", + register_as = "PoseDetectionToRoi", + deps = [ + "//mediapipe/calculators/util:alignment_points_to_rects_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "pose_landmarks_to_roi", + graph = "pose_landmarks_to_roi.pbtxt", + register_as = "PoseLandmarksToRoi", + deps = [ + "//mediapipe/calculators/util:alignment_points_to_rects_calculator", + "//mediapipe/calculators/util:landmarks_to_detection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + ], +) diff --git a/mediapipe/modules/pose_landmark/README.md b/mediapipe/modules/pose_landmark/README.md new file mode 100644 index 0000000..5752838 --- /dev/null +++ b/mediapipe/modules/pose_landmark/README.md @@ -0,0 +1,8 @@ +# pose_landmark + +Subgraphs|Details +:--- | :--- +[`PoseLandmarkByRoiCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_by_roi_cpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (CPU input, and inference is executed on CPU.) +[`PoseLandmarkByRoiGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_by_roi_gpu.pbtxt)| Detects landmarks of a single body pose. 
See landmarks (aka keypoints) [scheme](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (GPU input, and inference is executed on GPU.)
+[`PoseLandmarkCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_cpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (CPU input, and inference is executed on CPU.)
+[`PoseLandmarkGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt)| Detects landmarks of a single body pose. See landmarks (aka keypoints) [scheme](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_topology.svg). (GPU input, and inference is executed on GPU.)
diff --git a/mediapipe/modules/pose_landmark/pose_detection_to_roi.pbtxt b/mediapipe/modules/pose_landmark/pose_detection_to_roi.pbtxt
new file mode 100644
index 0000000..47f82bb
--- /dev/null
+++ b/mediapipe/modules/pose_landmark/pose_detection_to_roi.pbtxt
@@ -0,0 +1,45 @@
+# MediaPipe graph to calculate the pose region of interest (ROI) from a
+# detection provided by "PoseDetectionCpu" or "PoseDetectionGpu".
+#
+# NOTE: this graph is subject to change and should not be used directly.
+
+type: "PoseDetectionToRoi"
+
+# Pose detection. (Detection)
+input_stream: "DETECTION:detection"
+# Frame size (width and height). (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# ROI according to the first detection of input detections. (NormalizedRect)
+output_stream: "ROI:roi"
+
+# Converts pose detection into a rectangle based on center and scale alignment
+# points.
+node {
+  calculator: "AlignmentPointsRectsCalculator"
+  input_stream: "DETECTION:detection"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "NORM_RECT:raw_roi"
+  options: {
+    [mediapipe.DetectionsToRectsCalculatorOptions.ext] {
+      rotation_vector_start_keypoint_index: 0
+      rotation_vector_end_keypoint_index: 1
+      rotation_vector_target_angle_degrees: 90
+    }
+  }
+}
+
+# Expands the pose rect with the margin used during training.
+node {
+  calculator: "RectTransformationCalculator"
+  input_stream: "NORM_RECT:raw_roi"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "roi"
+  options: {
+    [mediapipe.RectTransformationCalculatorOptions.ext] {
+      scale_x: 1.25
+      scale_y: 1.25
+      square_long: true
+    }
+  }
+}
diff --git a/mediapipe/modules/pose_landmark/pose_landmark_by_roi_cpu.pbtxt b/mediapipe/modules/pose_landmark/pose_landmark_by_roi_cpu.pbtxt
new file mode 100644
index 0000000..b674894
--- /dev/null
+++ b/mediapipe/modules/pose_landmark/pose_landmark_by_roi_cpu.pbtxt
@@ -0,0 +1,178 @@
+# MediaPipe graph to detect/predict pose landmarks and optionally segmentation
+# within an ROI. (CPU input, and inference is executed on CPU.)
+#
+# It is required that "pose_landmark_lite.tflite" or
+# "pose_landmark_full.tflite" or "pose_landmark_heavy.tflite" is available at
+# "mediapipe/modules/pose_landmark/pose_landmark_lite.tflite" or
+# "mediapipe/modules/pose_landmark/pose_landmark_full.tflite" or
+# "mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite"
+# path respectively during execution, depending on the specification in the
+# MODEL_COMPLEXITY input side packet.
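The RectTransformationCalculator step above (scale 1.25, square_long) is what aligns the detector's raw ROI with the crop the landmark model was trained on. A hedged Rust sketch of that expansion, working in pixels so "square" means square on screen; rotation handling is omitted and the types are illustrative:

    #[derive(Debug, Clone, Copy)]
    struct NormRect { x_center: f32, y_center: f32, width: f32, height: f32 }

    fn expand_roi(r: NormRect, img_w: f32, img_h: f32) -> NormRect {
        // scale_x / scale_y: 1.25
        let w_px = r.width * img_w * 1.25;
        let h_px = r.height * img_h * 1.25;
        let side = w_px.max(h_px); // square_long: true
        NormRect { width: side / img_w, height: side / img_h, ..r }
    }

    fn main() {
        let raw = NormRect { x_center: 0.5, y_center: 0.5, width: 0.2, height: 0.4 };
        println!("{:?}", expand_roi(raw, 640.0, 480.0));
    }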
+# +# EXAMPLE: +# node { +# calculator: "PoseLandmarkByRoiCpu" +# input_side_packet: "MODEL_COMPLEXITY:model_complexity" +# input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" +# input_stream: "IMAGE:image" +# input_stream: "ROI:roi" +# output_stream: "LANDMARKS:landmarks" +# output_stream: "SEGMENTATION_MASK:segmentation_mask" +# } + +type: "PoseLandmarkByRoiCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:image" +# ROI (region of interest) within the given image where a pose is located. +# (NormalizedRect) +input_stream: "ROI:roi" + +# Whether to predict the segmentation mask. If unspecified, functions as set to +# false. (bool) +input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + +# Complexity of the pose landmark model: 0, 1 or 2. Landmark accuracy as well as +# inference latency generally go up with the model complexity. If unspecified, +# functions as set to 1. (int) +input_side_packet: "MODEL_COMPLEXITY:model_complexity" + +# Pose landmarks within the given ROI. (NormalizedLandmarkList) +# We have 33 landmarks (see pose_landmark_topology.svg) and there are other +# auxiliary key points. +# 0 - nose +# 1 - left eye (inner) +# 2 - left eye +# 3 - left eye (outer) +# 4 - right eye (inner) +# 5 - right eye +# 6 - right eye (outer) +# 7 - left ear +# 8 - right ear +# 9 - mouth (left) +# 10 - mouth (right) +# 11 - left shoulder +# 12 - right shoulder +# 13 - left elbow +# 14 - right elbow +# 15 - left wrist +# 16 - right wrist +# 17 - left pinky +# 18 - right pinky +# 19 - left index +# 20 - right index +# 21 - left thumb +# 22 - right thumb +# 23 - left hip +# 24 - right hip +# 25 - left knee +# 26 - right knee +# 27 - left ankle +# 28 - right ankle +# 29 - left heel +# 30 - right heel +# 31 - left foot index +# 32 - right foot index +# +# NOTE: If a pose is not present within the given ROI, for this particular +# timestamp there will not be an output packet in the LANDMARKS stream. However, +# the MediaPipe framework will internally inform the downstream calculators of +# the absence of this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:landmarks" +# Auxiliary landmarks for deriving the ROI in the subsequent image. +# (NormalizedLandmarkList) +output_stream: "AUXILIARY_LANDMARKS:auxiliary_landmarks" + +# Pose world landmarks within the given ROI. (LandmarkList) +# World landmarks are real-world 3D coordinates in meters with the origin at the +# center between hips. WORLD_LANDMARKS shares the same landmark topology as +# LANDMARKS. However, LANDMARKS provides coordinates (in pixels) of a 3D object +# projected onto the 2D image surface, while WORLD_LANDMARKS provides +# coordinates (in meters) of the 3D object itself. +output_stream: "WORLD_LANDMARKS:world_landmarks" + +# Segmentation mask on CPU in ImageFormat::VEC32F1. (Image) +output_stream: "SEGMENTATION_MASK:segmentation_mask" + +# Retrieves the image size. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_CPU:image" + output_stream: "SIZE:image_size" +} + +# Crops and transforms the specified ROI in the input image into an image patch +# represented as a tensor of dimension expected by the corresponding ML model, +# while maintaining the aspect ratio of the ROI (which can be different from +# that of the image patch). Therefore, there can be letterboxing around the ROI +# in the generated tensor representation. 
+node: { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE:image" + input_stream: "NORM_RECT:roi" + output_stream: "TENSORS:input_tensors" + output_stream: "LETTERBOX_PADDING:letterbox_padding" + output_stream: "MATRIX:transformation_matrix" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 256 + output_tensor_height: 256 + keep_aspect_ratio: true + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + } + } +} + +# Loads the pose landmark TF Lite model. +node { + calculator: "PoseLandmarkModelLoader" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + output_side_packet: "MODEL:model" +} + +# Runs model inference on CPU. +node { + calculator: "InferenceCalculator" + input_side_packet: "MODEL:model" + input_stream: "TENSORS:input_tensors" + output_stream: "TENSORS:output_tensors" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + delegate { xnnpack {} } + } + } +} + +# Decodes the tensors into the corresponding landmark and segmentation mask +# representation. +node { + calculator: "TensorsToPoseLandmarksAndSegmentation" + input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + input_stream: "TENSORS:output_tensors" + output_stream: "LANDMARKS:roi_landmarks" + output_stream: "AUXILIARY_LANDMARKS:roi_auxiliary_landmarks" + output_stream: "WORLD_LANDMARKS:roi_world_landmarks" + output_stream: "SEGMENTATION_MASK:roi_segmentation_mask" +} + +# Projects the landmarks and segmentation mask in the local coordinates of the +# (potentially letterboxed) ROI back to the global coordinates of the full input +# image. +node { + calculator: "PoseLandmarksAndSegmentationInverseProjection" + input_stream: "IMAGE_SIZE:image_size" + input_stream: "NORM_RECT:roi" + input_stream: "LETTERBOX_PADDING:letterbox_padding" + input_stream: "MATRIX:transformation_matrix" + input_stream: "LANDMARKS:roi_landmarks" + input_stream: "AUXILIARY_LANDMARKS:roi_auxiliary_landmarks" + input_stream: "WORLD_LANDMARKS:roi_world_landmarks" + input_stream: "SEGMENTATION_MASK:roi_segmentation_mask" + output_stream: "LANDMARKS:landmarks" + output_stream: "AUXILIARY_LANDMARKS:auxiliary_landmarks" + output_stream: "WORLD_LANDMARKS:world_landmarks" + output_stream: "SEGMENTATION_MASK:segmentation_mask" +} diff --git a/mediapipe/modules/pose_landmark/pose_landmark_by_roi_gpu.pbtxt b/mediapipe/modules/pose_landmark/pose_landmark_by_roi_gpu.pbtxt new file mode 100644 index 0000000..7a2acce --- /dev/null +++ b/mediapipe/modules/pose_landmark/pose_landmark_by_roi_gpu.pbtxt @@ -0,0 +1,174 @@ +# MediaPipe graph to detect/predict pose landmarks and optionally segmentation +# within an ROI. (GPU input, and inference is executed on GPU.) +# +# It is required that "pose_landmark_lite.tflite" or +# "pose_landmark_full.tflite" or "pose_landmark_heavy.tflite" is available at +# "mediapipe/modules/pose_landmark/pose_landmark_lite.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_full.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite" +# path respectively during execution, depending on the specification in the +# MODEL_COMPLEXITY input side packet. +# +# EXAMPLE: +# node { +# calculator: "PoseLandmarkByRoiGpu" +# input_side_packet: "MODEL_COMPLEXITY:model_complexity" +# input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" +# input_stream: "IMAGE:image" +# input_stream: "ROI:roi" +# output_stream: "LANDMARKS:landmarks" +# output_stream: "SEGMENTATION_MASK:segmentation_mask" +# } + +type: "PoseLandmarkByRoiGpu" + +# GPU image. 
(GpuBuffer) +input_stream: "IMAGE:image" +# ROI (region of interest) within the given image where a pose is located. +# (NormalizedRect) +input_stream: "ROI:roi" + +# Whether to predict the segmentation mask. If unspecified, functions as set to +# false. (bool) +input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + +# Complexity of the pose landmark model: 0, 1 or 2. Landmark accuracy as well as +# inference latency generally go up with the model complexity. If unspecified, +# functions as set to 1. (int) +input_side_packet: "MODEL_COMPLEXITY:model_complexity" + +# Pose landmarks within the given ROI. (NormalizedLandmarkList) +# We have 33 landmarks (see pose_landmark_topology.svg), and there are other +# auxiliary key points. +# 0 - nose +# 1 - left eye (inner) +# 2 - left eye +# 3 - left eye (outer) +# 4 - right eye (inner) +# 5 - right eye +# 6 - right eye (outer) +# 7 - left ear +# 8 - right ear +# 9 - mouth (left) +# 10 - mouth (right) +# 11 - left shoulder +# 12 - right shoulder +# 13 - left elbow +# 14 - right elbow +# 15 - left wrist +# 16 - right wrist +# 17 - left pinky +# 18 - right pinky +# 19 - left index +# 20 - right index +# 21 - left thumb +# 22 - right thumb +# 23 - left hip +# 24 - right hip +# 25 - left knee +# 26 - right knee +# 27 - left ankle +# 28 - right ankle +# 29 - left heel +# 30 - right heel +# 31 - left foot index +# 32 - right foot index +# +# NOTE: If a pose is not present within the given ROI, for this particular +# timestamp there will not be an output packet in the LANDMARKS stream. However, +# the MediaPipe framework will internally inform the downstream calculators of +# the absence of this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:landmarks" +# Auxiliary landmarks for deriving the ROI in the subsequent image. +# (NormalizedLandmarkList) +output_stream: "AUXILIARY_LANDMARKS:auxiliary_landmarks" + +# Pose world landmarks within the given ROI. (LandmarkList) +# World landmarks are real-world 3D coordinates in meters with the origin at the +# center between hips. WORLD_LANDMARKS shares the same landmark topology as +# LANDMARKS. However, LANDMARKS provides coordinates (in pixels) of a 3D object +# projected onto the 2D image surface, while WORLD_LANDMARKS provides +# coordinates (in meters) of the 3D object itself. +output_stream: "WORLD_LANDMARKS:world_landmarks" + +# Segmentation mask on GPU in RGBA with the same mask values in R and A. (Image) +output_stream: "SEGMENTATION_MASK:segmentation_mask" + +# Retrieves the image size. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:image" + output_stream: "SIZE:image_size" +} + +# Crops and transforms the specified ROI in the input image into an image patch +# represented as a tensor of dimension expected by the corresponding ML model, +# while maintaining the aspect ratio of the ROI (which can be different from +# that of the image patch). Therefore, there can be letterboxing around the ROI +# in the generated tensor representation. 
+node: { + calculator: "ImageToTensorCalculator" + input_stream: "IMAGE_GPU:image" + input_stream: "NORM_RECT:roi" + output_stream: "TENSORS:input_tensors" + output_stream: "LETTERBOX_PADDING:letterbox_padding" + output_stream: "MATRIX:transformation_matrix" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 256 + output_tensor_height: 256 + keep_aspect_ratio: true + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + gpu_origin: TOP_LEFT + } + } +} + +# Loads the pose landmark TF Lite model. +node { + calculator: "PoseLandmarkModelLoader" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + output_side_packet: "MODEL:model" +} + +# Runs model inference on GPU. +node { + calculator: "InferenceCalculator" + input_side_packet: "MODEL:model" + input_stream: "TENSORS:input_tensors" + output_stream: "TENSORS:output_tensors" +} + +# Decodes the tensors into the corresponding landmark and segmentation mask +# representation. +node { + calculator: "TensorsToPoseLandmarksAndSegmentation" + input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + input_stream: "TENSORS:output_tensors" + output_stream: "LANDMARKS:roi_landmarks" + output_stream: "AUXILIARY_LANDMARKS:roi_auxiliary_landmarks" + output_stream: "WORLD_LANDMARKS:roi_world_landmarks" + output_stream: "SEGMENTATION_MASK:roi_segmentation_mask" +} + +# Projects the landmarks and segmentation mask in the local coordinates of the +# (potentially letterboxed) ROI back to the global coordinates of the full input +# image. +node { + calculator: "PoseLandmarksAndSegmentationInverseProjection" + input_stream: "IMAGE_SIZE:image_size" + input_stream: "NORM_RECT:roi" + input_stream: "LETTERBOX_PADDING:letterbox_padding" + input_stream: "MATRIX:transformation_matrix" + input_stream: "LANDMARKS:roi_landmarks" + input_stream: "AUXILIARY_LANDMARKS:roi_auxiliary_landmarks" + input_stream: "WORLD_LANDMARKS:roi_world_landmarks" + input_stream: "SEGMENTATION_MASK:roi_segmentation_mask" + output_stream: "LANDMARKS:landmarks" + output_stream: "AUXILIARY_LANDMARKS:auxiliary_landmarks" + output_stream: "WORLD_LANDMARKS:world_landmarks" + output_stream: "SEGMENTATION_MASK:segmentation_mask" +} diff --git a/mediapipe/modules/pose_landmark/pose_landmark_cpu.pbtxt b/mediapipe/modules/pose_landmark/pose_landmark_cpu.pbtxt new file mode 100644 index 0000000..5faf08a --- /dev/null +++ b/mediapipe/modules/pose_landmark/pose_landmark_cpu.pbtxt @@ -0,0 +1,268 @@ +# MediaPipe graph to detect/predict pose landmarks. (CPU input, and inference is +# executed on CPU.) This graph tries to skip pose detection as much as possible +# by using previously detected/predicted landmarks for new images. +# +# It is required that "pose_detection.tflite" is available at +# "mediapipe/modules/pose_detection/pose_detection.tflite" +# path during execution. +# +# It is required that "pose_landmark_lite.tflite" or +# "pose_landmark_full.tflite" or "pose_landmark_heavy.tflite" is available at +# "mediapipe/modules/pose_landmark/pose_landmark_lite.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_full.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite" +# path respectively during execution, depending on the specification in the +# MODEL_COMPLEXITY input side packet. 
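Before the EXAMPLE below, it is worth spelling out the scheduling idea: the comparatively expensive pose detector only runs when no ROI could be derived from the previous frame's landmarks, which is what the GateCalculator / PreviousLoopbackCalculator wiring further down implements. In Rust terms the per-frame policy reduces to a simple or_else (a sketch with illustrative types, not this crate's API):

    struct NormRect; // stand-in for the ROI type

    /// Per-frame ROI policy: reuse the ROI tracked from the previous frame's
    /// landmarks when present; otherwise fall back to running pose detection.
    fn roi_for_frame(
        prev_roi_from_landmarks: Option<NormRect>,
        run_pose_detection: impl FnOnce() -> Option<NormRect>,
    ) -> Option<NormRect> {
        prev_roi_from_landmarks.or_else(run_pose_detection)
    }

    fn main() {
        // First frame: no previous ROI, so the detector closure runs.
        let _roi = roi_for_frame(None, || Some(NormRect));
    }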
+# +# EXAMPLE: +# node { +# calculator: "PoseLandmarkCpu" +# input_side_packet: "MODEL_COMPLEXITY:model_complexity" +# input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" +# input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" +# input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" +# input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" +# input_stream: "IMAGE:image" +# output_stream: "LANDMARKS:pose_landmarks" +# output_stream: "SEGMENTATION_MASK:segmentation_mask" +# } + +type: "PoseLandmarkCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:image" + +# Whether to filter landmarks across different input images to reduce jitter. +# If unspecified, functions as set to true. (bool) +input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" + +# Whether to predict the segmentation mask. If unspecified, functions as set to +# false. (bool) +input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + +# Whether to filter segmentation mask across different input images to reduce +# jitter. If unspecified, functions as set to true. (bool) +input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" + +# Complexity of the pose landmark model: 0, 1 or 2. Landmark accuracy as well as +# inference latency generally go up with the model complexity. If unspecified, +# functions as set to 1. (int) +input_side_packet: "MODEL_COMPLEXITY:model_complexity" + +# Whether landmarks on the previous image should be used to help localize +# landmarks on the current image. (bool) +input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" + +# Pose landmarks. (NormalizedLandmarkList) +# We have 33 landmarks (see pose_landmark_topology.svg), and there are other +# auxiliary key points. +# 0 - nose +# 1 - left eye (inner) +# 2 - left eye +# 3 - left eye (outer) +# 4 - right eye (inner) +# 5 - right eye +# 6 - right eye (outer) +# 7 - left ear +# 8 - right ear +# 9 - mouth (left) +# 10 - mouth (right) +# 11 - left shoulder +# 12 - right shoulder +# 13 - left elbow +# 14 - right elbow +# 15 - left wrist +# 16 - right wrist +# 17 - left pinky +# 18 - right pinky +# 19 - left index +# 20 - right index +# 21 - left thumb +# 22 - right thumb +# 23 - left hip +# 24 - right hip +# 25 - left knee +# 26 - right knee +# 27 - left ankle +# 28 - right ankle +# 29 - left heel +# 30 - right heel +# 31 - left foot index +# 32 - right foot index +# +# NOTE: if a pose is not present within the given ROI, for this particular +# timestamp there will not be an output packet in the LANDMARKS stream. However, +# the MediaPipe framework will internally inform the downstream calculators of +# the absence of this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:pose_landmarks" + +# Pose world landmarks. (LandmarkList) +# World landmarks are real-world 3D coordinates in meters with the origin at the +# center between hips. WORLD_LANDMARKS shares the same landmark topology as +# LANDMARKS. However, LANDMARKS provides coordinates (in pixels) of a 3D object +# projected onto the 2D image surface, while WORLD_LANDMARKS provides +# coordinates (in meters) of the 3D object itself. +output_stream: "WORLD_LANDMARKS:pose_world_landmarks" + +# Segmentation mask. (ImageFrame in ImageFormat::VEC32F1) +output_stream: "SEGMENTATION_MASK:segmentation_mask" + +# Extra outputs (for debugging, for instance). +# Detected poses. (Detection) +output_stream: "DETECTION:pose_detection" +# Regions of interest calculated based on landmarks. 
(NormalizedRect)
+output_stream: "ROI_FROM_LANDMARKS:pose_rect_from_landmarks"
+# Regions of interest calculated based on pose detections. (NormalizedRect)
+output_stream: "ROI_FROM_DETECTION:pose_rect_from_detection"
+
+# When the optional input side packet "use_prev_landmarks" is either absent or
+# set to true, uses the landmarks on the previous image to help localize
+# landmarks on the current image.
+node {
+  calculator: "GateCalculator"
+  input_side_packet: "ALLOW:use_prev_landmarks"
+  input_stream: "prev_pose_rect_from_landmarks"
+  output_stream: "gated_prev_pose_rect_from_landmarks"
+  options: {
+    [mediapipe.GateCalculatorOptions.ext] {
+      allow: true
+    }
+  }
+}
+
+# Checks if there's a previous pose rect calculated from landmarks.
+node: {
+  calculator: "PacketPresenceCalculator"
+  input_stream: "PACKET:gated_prev_pose_rect_from_landmarks"
+  output_stream: "PRESENCE:prev_pose_rect_from_landmarks_is_present"
+}
+
+# Calculates size of the image.
+node {
+  calculator: "ImagePropertiesCalculator"
+  input_stream: "IMAGE_CPU:image"
+  output_stream: "SIZE:image_size"
+}
+
+# Drops the incoming image if the pose has already been identified from the
+# previous image. Otherwise, passes the incoming image through to trigger a new
+# round of pose detection.
+node {
+  calculator: "GateCalculator"
+  input_stream: "image"
+  input_stream: "image_size"
+  input_stream: "DISALLOW:prev_pose_rect_from_landmarks_is_present"
+  output_stream: "image_for_pose_detection"
+  output_stream: "image_size_for_pose_detection"
+  options: {
+    [mediapipe.GateCalculatorOptions.ext] {
+      empty_packets_as_allow: true
+    }
+  }
+}
+
+# Detects poses.
+node {
+  calculator: "PoseDetectionCpu"
+  input_stream: "IMAGE:image_for_pose_detection"
+  output_stream: "DETECTIONS:pose_detections"
+}
+
+# Gets the very first detection from the "pose_detections" vector.
+node {
+  calculator: "SplitDetectionVectorCalculator"
+  input_stream: "pose_detections"
+  output_stream: "pose_detection"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 0 end: 1 }
+      element_only: true
+    }
+  }
+}
+
+# Calculates region of interest based on pose detection, so that it can be used
+# to detect landmarks.
+node {
+  calculator: "PoseDetectionToRoi"
+  input_stream: "DETECTION:pose_detection"
+  input_stream: "IMAGE_SIZE:image_size_for_pose_detection"
+  output_stream: "ROI:pose_rect_from_detection"
+}
+
+# Selects either pose rect (or ROI) calculated from detection or from previously
+# detected landmarks if available (in this case, calculation of pose rect from
+# detection is skipped).
+node {
+  calculator: "MergeCalculator"
+  input_stream: "pose_rect_from_detection"
+  input_stream: "gated_prev_pose_rect_from_landmarks"
+  output_stream: "pose_rect"
+}
+
+# Detects pose landmarks within the specified region of interest of the image.
+node {
+  calculator: "PoseLandmarkByRoiCpu"
+  input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+  input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation"
+  input_stream: "IMAGE:image"
+  input_stream: "ROI:pose_rect"
+  output_stream: "LANDMARKS:unfiltered_pose_landmarks"
+  output_stream: "AUXILIARY_LANDMARKS:unfiltered_auxiliary_landmarks"
+  output_stream: "WORLD_LANDMARKS:unfiltered_world_landmarks"
+  output_stream: "SEGMENTATION_MASK:unfiltered_segmentation_mask"
+}
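The gate wiring above is the heart of the detect-then-track scheme: full pose detection runs only on frames that arrive without a rect carried over from the previous image, while the PreviousLoopbackCalculator further down feeds each frame's landmark-derived rect back as the next frame's starting point. A minimal Rust sketch of the same control flow; `Frame`, `Rect`, `Landmarks`, and the helper functions are hypothetical stand-ins for the subgraphs:

```rust
// Sketch only: these types and helpers stand in for the PoseDetectionCpu,
// PoseLandmarkByRoiCpu, and PoseLandmarksToRoi subgraphs wired above.
struct Frame;
struct Rect;
struct Landmarks;

fn detect_pose(_f: &Frame) -> Option<Rect> { None }
fn track_landmarks(_f: &Frame, _roi: &Rect) -> Option<Landmarks> { None }
fn roi_from_landmarks(_l: &Landmarks) -> Rect { Rect }

struct PoseTracker {
    prev_roi: Option<Rect>, // pose rect cached from the previous frame
}

impl PoseTracker {
    fn process(&mut self, frame: &Frame) -> Option<Landmarks> {
        // GateCalculator: run full detection only when no ROI carried over.
        let roi = match self.prev_roi.take() {
            Some(r) => r,
            None => detect_pose(frame)?, // no pose: no output this frame
        };
        let landmarks = track_landmarks(frame, &roi);
        // PreviousLoopbackCalculator back edge: cache next frame's ROI.
        self.prev_roi = landmarks.as_ref().map(roi_from_landmarks);
        landmarks
    }
}
```

+# Smoothes landmarks to reduce jitter.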
+node { + calculator: "PoseLandmarkFiltering" + input_side_packet: "ENABLE:smooth_landmarks" + input_stream: "IMAGE_SIZE:image_size" + input_stream: "NORM_LANDMARKS:unfiltered_pose_landmarks" + input_stream: "AUX_NORM_LANDMARKS:unfiltered_auxiliary_landmarks" + input_stream: "WORLD_LANDMARKS:unfiltered_world_landmarks" + output_stream: "FILTERED_NORM_LANDMARKS:pose_landmarks" + output_stream: "FILTERED_AUX_NORM_LANDMARKS:auxiliary_landmarks" + output_stream: "FILTERED_WORLD_LANDMARKS:pose_world_landmarks" +} + +# Calculates region of interest based on the auxiliary landmarks, to be used in +# the subsequent image. +node { + calculator: "PoseLandmarksToRoi" + input_stream: "LANDMARKS:auxiliary_landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:pose_rect_from_landmarks" +} + +# Caches pose rects calculated from landmarks, and upon the arrival of the next +# input image, sends out the cached rects with timestamps replaced by that of +# the input image, essentially generating a packet that carries the previous +# pose rects. Note that upon the arrival of the very first input image, a +# timestamp bound update occurs to jump start the feedback loop. +node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:image" + input_stream: "LOOP:pose_rect_from_landmarks" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:prev_pose_rect_from_landmarks" +} + +# Smoothes segmentation to reduce jitter. +node { + calculator: "PoseSegmentationFiltering" + input_side_packet: "ENABLE:smooth_segmentation" + input_stream: "SEGMENTATION_MASK:unfiltered_segmentation_mask" + output_stream: "FILTERED_SEGMENTATION_MASK:filtered_segmentation_mask" +} + +# Converts the incoming segmentation mask represented as an Image into the +# corresponding ImageFrame type. +node: { + calculator: "FromImageCalculator" + input_stream: "IMAGE:filtered_segmentation_mask" + output_stream: "IMAGE_CPU:segmentation_mask" +} diff --git a/mediapipe/modules/pose_landmark/pose_landmark_filtering.pbtxt b/mediapipe/modules/pose_landmark/pose_landmark_filtering.pbtxt new file mode 100644 index 0000000..bb3665f --- /dev/null +++ b/mediapipe/modules/pose_landmark/pose_landmark_filtering.pbtxt @@ -0,0 +1,247 @@ +# MediaPipe graph to filter landmarks temporally (across packets with +# incremental timestamps) to reduce jitter. +# +# EXAMPLE: +# node { +# calculator: "PoseLandmarkFiltering" +# input_side_packet: "ENABLE:enable" +# input_stream: "IMAGE_SIZE:image_size" +# input_stream: "NORM_LANDMARKS:landmarks" +# input_stream: "AUX_NORM_LANDMARKS:aux_landmarks" +# input_stream: "WORLD_LANDMARKS:world_landmarks" +# output_stream: "FILTERED_NORM_LANDMARKS:filtered_landmarks" +# output_stream: "FILTERED_AUX_NORM_LANDMARKS:filtered_aux_landmarks" +# output_stream: "FILTERED_WORLD_LANDMARKS:filtered_world_landmarks" +# } + +type: "PoseLandmarkFiltering" + +# Whether to enable filtering. If unspecified, functions as enabled. (bool) +input_side_packet: "ENABLE:enable" + +# Size of the image (width & height) where the landmarks are estimated from. +# (std::pair) +input_stream: "IMAGE_SIZE:image_size" +# Normalized landmarks. (NormalizedLandmarkList) +input_stream: "NORM_LANDMARKS:landmarks" +# Auxiliary set of normalized landmarks. (NormalizedLandmarkList) +input_stream: "AUX_NORM_LANDMARKS:aux_landmarks" +# World landmarks. (LandmarkList) +input_stream: "WORLD_LANDMARKS:world_landmarks" +# Filtered normalized landmarks. 
(NormalizedLandmarkList)
+output_stream: "FILTERED_NORM_LANDMARKS:filtered_landmarks"
+# Filtered auxiliary set of normalized landmarks. (NormalizedLandmarkList)
+output_stream: "FILTERED_AUX_NORM_LANDMARKS:filtered_aux_landmarks"
+# Filtered world landmarks. (LandmarkList)
+output_stream: "FILTERED_WORLD_LANDMARKS:filtered_world_landmarks"
+
+# Converts landmarks to a detection that tightly encloses all landmarks.
+node {
+  calculator: "LandmarksToDetectionCalculator"
+  input_stream: "NORM_LANDMARKS:aux_landmarks"
+  output_stream: "DETECTION:aux_detection"
+}
+
+# Converts detection into a rectangle based on center and scale alignment
+# points.
+node {
+  calculator: "AlignmentPointsRectsCalculator"
+  input_stream: "DETECTION:aux_detection"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "NORM_RECT:roi"
+  options: {
+    [mediapipe.DetectionsToRectsCalculatorOptions.ext] {
+      rotation_vector_start_keypoint_index: 0
+      rotation_vector_end_keypoint_index: 1
+      rotation_vector_target_angle_degrees: 90
+    }
+  }
+}
+
+# Smoothes pose landmark visibilities to reduce jitter.
+node {
+  calculator: "SwitchContainer"
+  input_side_packet: "ENABLE:enable"
+  input_stream: "NORM_LANDMARKS:landmarks"
+  output_stream: "NORM_FILTERED_LANDMARKS:filtered_visibility"
+  options: {
+    [mediapipe.SwitchContainerOptions.ext] {
+      enable: true
+      contained_node: {
+        calculator: "VisibilitySmoothingCalculator"
+        options: {
+          [mediapipe.VisibilitySmoothingCalculatorOptions.ext] {
+            no_filter: {}
+          }
+        }
+      }
+      contained_node: {
+        calculator: "VisibilitySmoothingCalculator"
+        options: {
+          [mediapipe.VisibilitySmoothingCalculatorOptions.ext] {
+            low_pass_filter {
+              alpha: 0.1
+            }
+          }
+        }
+      }
+    }
+  }
+}
+
+# Smoothes pose landmark coordinates to reduce jitter.
+node {
+  calculator: "SwitchContainer"
+  input_side_packet: "ENABLE:enable"
+  input_stream: "NORM_LANDMARKS:filtered_visibility"
+  input_stream: "IMAGE_SIZE:image_size"
+  input_stream: "OBJECT_SCALE_ROI:roi"
+  output_stream: "NORM_FILTERED_LANDMARKS:filtered_landmarks"
+  options: {
+    [mediapipe.SwitchContainerOptions.ext] {
+      enable: true
+      contained_node: {
+        calculator: "LandmarksSmoothingCalculator"
+        options: {
+          [mediapipe.LandmarksSmoothingCalculatorOptions.ext] {
+            no_filter: {}
+          }
+        }
+      }
+      contained_node: {
+        calculator: "LandmarksSmoothingCalculator"
+        options: {
+          [mediapipe.LandmarksSmoothingCalculatorOptions.ext] {
+            one_euro_filter {
+              # Min cutoff 0.05 results in ~0.01 alpha in landmark EMA filter
+              # when landmark is static.
+              min_cutoff: 0.05
+              # Beta 80.0 in combination with min_cutoff 0.05 results in
+              # ~0.94 alpha in landmark EMA filter when landmark is moving fast.
+              beta: 80.0
+              # Derivative cutoff 1.0 results in ~0.17 alpha in landmark
+              # velocity EMA filter.
+              derivate_cutoff: 1.0
+            }
+          }
+        }
+      }
+    }
+  }
+}
+
+# Smoothes world landmark visibilities to reduce jitter.
+node {
+  calculator: "SwitchContainer"
+  input_side_packet: "ENABLE:enable"
+  input_stream: "LANDMARKS:world_landmarks"
+  output_stream: "FILTERED_LANDMARKS:filtered_world_visibility"
+  options: {
+    [mediapipe.SwitchContainerOptions.ext] {
+      enable: true
+      contained_node: {
+        calculator: "VisibilitySmoothingCalculator"
+        options: {
+          [mediapipe.VisibilitySmoothingCalculatorOptions.ext] {
+            no_filter: {}
+          }
+        }
+      }
+      contained_node: {
+        calculator: "VisibilitySmoothingCalculator"
+        options: {
+          [mediapipe.VisibilitySmoothingCalculatorOptions.ext] {
+            low_pass_filter {
+              alpha: 0.1
+            }
+          }
+        }
+      }
+    }
+  }
+}
+
+# Smoothes world landmark coordinates to reduce jitter.
+node {
+  calculator: "SwitchContainer"
+  input_side_packet: "ENABLE:enable"
+  input_stream: "LANDMARKS:filtered_world_visibility"
+  output_stream: "FILTERED_LANDMARKS:filtered_world_landmarks"
+  options: {
+    [mediapipe.SwitchContainerOptions.ext] {
+      enable: true
+      contained_node: {
+        calculator: "LandmarksSmoothingCalculator"
+        options: {
+          [mediapipe.LandmarksSmoothingCalculatorOptions.ext] {
+            no_filter: {}
+          }
+        }
+      }
+      contained_node: {
+        calculator: "LandmarksSmoothingCalculator"
+        options: {
+          [mediapipe.LandmarksSmoothingCalculatorOptions.ext] {
+            one_euro_filter {
+              # Min cutoff 0.1 results in ~0.02 alpha in landmark EMA filter
+              # when landmark is static.
+              min_cutoff: 0.1
+              # Beta 40.0 in combination with min_cutoff 0.1 results in ~0.8
+              # alpha in landmark EMA filter when landmark is moving fast.
+              beta: 40.0
+              # Derivative cutoff 1.0 results in ~0.17 alpha in landmark
+              # velocity EMA filter.
+              derivate_cutoff: 1.0
+              # As world landmarks are predicted in real-world 3D coordinates
+              # in meters (rather than in pixels of the input image), prediction
+              # scale does not depend on the pose size in the image.
+              disable_value_scaling: true
+            }
+          }
+        }
+      }
+    }
+  }
+}
+
+# Smoothes auxiliary landmark visibilities to reduce jitter.
+node {
+  calculator: "VisibilitySmoothingCalculator"
+  input_stream: "NORM_LANDMARKS:aux_landmarks"
+  output_stream: "NORM_FILTERED_LANDMARKS:filtered_aux_visibility"
+  options: {
+    [mediapipe.VisibilitySmoothingCalculatorOptions.ext] {
+      low_pass_filter {
+        alpha: 0.1
+      }
+    }
+  }
+}
+
+# Smoothes auxiliary landmarks to reduce jitter.
+node {
+  calculator: "LandmarksSmoothingCalculator"
+  input_stream: "NORM_LANDMARKS:filtered_aux_visibility"
+  input_stream: "IMAGE_SIZE:image_size"
+  input_stream: "OBJECT_SCALE_ROI:roi"
+  output_stream: "NORM_FILTERED_LANDMARKS:filtered_aux_landmarks"
+  options: {
+    [mediapipe.LandmarksSmoothingCalculatorOptions.ext] {
+      # Auxiliary landmarks are smoothed more heavily than the main landmarks
+      # to keep the ROI crop for pose landmark prediction very stable when the
+      # object is not moving, while staying responsive to sudden movements.
+      one_euro_filter {
+        # Min cutoff 0.01 results in ~0.002 alpha in landmark EMA
+        # filter when landmark is static.
+        min_cutoff: 0.01
+        # Beta 10.0 in combination with min_cutoff 0.01 results in ~0.68
+        # alpha in landmark EMA filter when landmark is moving fast.
+        beta: 10.0
+        # Derivative cutoff 1.0 results in ~0.17 alpha in landmark
+        # velocity EMA filter.
+        derivate_cutoff: 1.0
+      }
+    }
+  }
+}
diff --git a/mediapipe/modules/pose_landmark/pose_landmark_full.tflite b/mediapipe/modules/pose_landmark/pose_landmark_full.tflite
new file mode 100755
index 0000000..e2ee84f
Binary files /dev/null and b/mediapipe/modules/pose_landmark/pose_landmark_full.tflite differ
diff --git a/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt b/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt
new file mode 100644
index 0000000..3ff9ac9
--- /dev/null
+++ b/mediapipe/modules/pose_landmark/pose_landmark_gpu.pbtxt
@@ -0,0 +1,268 @@
+# MediaPipe graph to detect/predict pose landmarks. (GPU input, and inference is
+# executed on GPU.) This graph tries to skip pose detection as much as possible
+# by using previously detected/predicted landmarks for new images.
+#
+# It is required that "pose_detection.tflite" is available at
+# "mediapipe/modules/pose_detection/pose_detection.tflite"
+# path during execution.
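The alpha values quoted in the One Euro filter comments above follow from the filter's definition: the EMA coefficient is derived from a cutoff frequency that grows linearly with the measured speed (cutoff = min_cutoff + beta * |velocity|). A small sketch of that relationship, assuming roughly 30 Hz input, which is what the quoted figures work out to:

```rust
use std::f32::consts::PI;

/// EMA coefficient for a given cutoff frequency (Hz) and sample period (s),
/// as used by the One Euro filter.
fn smoothing_alpha(cutoff_hz: f32, period_s: f32) -> f32 {
    let tau = 1.0 / (2.0 * PI * cutoff_hz);
    1.0 / (1.0 + tau / period_s)
}

fn main() {
    let period = 1.0 / 30.0; // ~30 fps input
    // Static landmark: min_cutoff dominates.
    println!("{:.3}", smoothing_alpha(0.05, period)); // ~0.010
    // Fast-moving landmark: cutoff = min_cutoff + beta * |velocity|.
    let speed = 1.0; // normalized units per second, illustrative
    println!("{:.3}", smoothing_alpha(0.05 + 80.0 * speed, period)); // ~0.94
}
```

At min_cutoff 0.05 this yields ~0.01 (heavy smoothing for a static landmark); once beta scales the cutoff up, alpha approaches 1 and the filter tracks fast motion almost directly.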
+# +# It is required that "pose_landmark_lite.tflite" or +# "pose_landmark_full.tflite" or "pose_landmark_heavy.tflite" is available at +# "mediapipe/modules/pose_landmark/pose_landmark_lite.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_full.tflite" or +# "mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite" +# path respectively during execution, depending on the specification in the +# MODEL_COMPLEXITY input side packet. +# +# EXAMPLE: +# node { +# calculator: "PoseLandmarkGpu" +# input_side_packet: "MODEL_COMPLEXITY:model_complexity" +# input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" +# input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" +# input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" +# input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" +# input_stream: "IMAGE:image" +# output_stream: "LANDMARKS:pose_landmarks" +# output_stream: "SEGMENTATION_MASK:segmentation_mask" +# } + +type: "PoseLandmarkGpu" + +# GPU image. (GpuBuffer) +input_stream: "IMAGE:image" + +# Whether to filter landmarks across different input images to reduce jitter. +# If unspecified, functions as set to true. (bool) +input_side_packet: "SMOOTH_LANDMARKS:smooth_landmarks" + +# Whether to predict the segmentation mask. If unspecified, functions as set to +# false. (bool) +input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + +# Whether to filter segmentation mask across different input images to reduce +# jitter. If unspecified, functions as set to true. (bool) +input_side_packet: "SMOOTH_SEGMENTATION:smooth_segmentation" + +# Complexity of the pose landmark model: 0, 1 or 2. Landmark accuracy as well as +# inference latency generally go up with the model complexity. If unspecified, +# functions as set to 1. (int) +input_side_packet: "MODEL_COMPLEXITY:model_complexity" + +# Whether landmarks on the previous image should be used to help localize +# landmarks on the current image. (bool) +input_side_packet: "USE_PREV_LANDMARKS:use_prev_landmarks" + +# Pose landmarks. (NormalizedLandmarkList) +# We have 33 landmarks (see pose_landmark_topology.svg), and there are other +# auxiliary key points. +# 0 - nose +# 1 - left eye (inner) +# 2 - left eye +# 3 - left eye (outer) +# 4 - right eye (inner) +# 5 - right eye +# 6 - right eye (outer) +# 7 - left ear +# 8 - right ear +# 9 - mouth (left) +# 10 - mouth (right) +# 11 - left shoulder +# 12 - right shoulder +# 13 - left elbow +# 14 - right elbow +# 15 - left wrist +# 16 - right wrist +# 17 - left pinky +# 18 - right pinky +# 19 - left index +# 20 - right index +# 21 - left thumb +# 22 - right thumb +# 23 - left hip +# 24 - right hip +# 25 - left knee +# 26 - right knee +# 27 - left ankle +# 28 - right ankle +# 29 - left heel +# 30 - right heel +# 31 - left foot index +# 32 - right foot index +# +# NOTE: if a pose is not present within the given ROI, for this particular +# timestamp there will not be an output packet in the LANDMARKS stream. However, +# the MediaPipe framework will internally inform the downstream calculators of +# the absence of this packet so that they don't wait for it unnecessarily. +output_stream: "LANDMARKS:pose_landmarks" + +# Pose world landmarks. (LandmarkList) +# World landmarks are real-world 3D coordinates in meters with the origin at the +# center between hips. WORLD_LANDMARKS shares the same landmark topology as +# LANDMARKS. 
However, LANDMARKS provides coordinates (in pixels) of a 3D object
+# projected onto the 2D image surface, while WORLD_LANDMARKS provides
+# coordinates (in meters) of the 3D object itself.
+output_stream: "WORLD_LANDMARKS:pose_world_landmarks"
+
+# Segmentation mask. (GpuBuffer in RGBA, with the same mask values in R and A)
+output_stream: "SEGMENTATION_MASK:segmentation_mask"
+
+# Extra outputs (for debugging, for instance).
+# Detected poses. (Detection)
+output_stream: "DETECTION:pose_detection"
+# Regions of interest calculated based on landmarks. (NormalizedRect)
+output_stream: "ROI_FROM_LANDMARKS:pose_rect_from_landmarks"
+# Regions of interest calculated based on pose detections. (NormalizedRect)
+output_stream: "ROI_FROM_DETECTION:pose_rect_from_detection"
+
+# When the optional input side packet "use_prev_landmarks" is either absent or
+# set to true, uses the landmarks on the previous image to help localize
+# landmarks on the current image.
+node {
+  calculator: "GateCalculator"
+  input_side_packet: "ALLOW:use_prev_landmarks"
+  input_stream: "prev_pose_rect_from_landmarks"
+  output_stream: "gated_prev_pose_rect_from_landmarks"
+  options: {
+    [mediapipe.GateCalculatorOptions.ext] {
+      allow: true
+    }
+  }
+}
+
+# Checks if there's a previous pose rect calculated from landmarks.
+node: {
+  calculator: "PacketPresenceCalculator"
+  input_stream: "PACKET:gated_prev_pose_rect_from_landmarks"
+  output_stream: "PRESENCE:prev_pose_rect_from_landmarks_is_present"
+}
+
+# Calculates size of the image.
+node {
+  calculator: "ImagePropertiesCalculator"
+  input_stream: "IMAGE_GPU:image"
+  output_stream: "SIZE:image_size"
+}
+
+# Drops the incoming image if the pose has already been identified from the
+# previous image. Otherwise, passes the incoming image through to trigger a new
+# round of pose detection.
+node {
+  calculator: "GateCalculator"
+  input_stream: "image"
+  input_stream: "image_size"
+  input_stream: "DISALLOW:prev_pose_rect_from_landmarks_is_present"
+  output_stream: "image_for_pose_detection"
+  output_stream: "image_size_for_pose_detection"
+  options: {
+    [mediapipe.GateCalculatorOptions.ext] {
+      empty_packets_as_allow: true
+    }
+  }
+}
+
+# Detects poses.
+node {
+  calculator: "PoseDetectionGpu"
+  input_stream: "IMAGE:image_for_pose_detection"
+  output_stream: "DETECTIONS:pose_detections"
+}
+
+# Gets the very first detection from the "pose_detections" vector.
+node {
+  calculator: "SplitDetectionVectorCalculator"
+  input_stream: "pose_detections"
+  output_stream: "pose_detection"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 0 end: 1 }
+      element_only: true
+    }
+  }
+}
+
+# Calculates region of interest based on pose detection, so that it can be used
+# to detect landmarks.
+node {
+  calculator: "PoseDetectionToRoi"
+  input_stream: "DETECTION:pose_detection"
+  input_stream: "IMAGE_SIZE:image_size_for_pose_detection"
+  output_stream: "ROI:pose_rect_from_detection"
+}
+
+# Selects either pose rect (or ROI) calculated from detection or from previously
+# detected landmarks if available (in this case, calculation of pose rect from
+# detection is skipped).
+node {
+  calculator: "MergeCalculator"
+  input_stream: "pose_rect_from_detection"
+  input_stream: "gated_prev_pose_rect_from_landmarks"
+  output_stream: "pose_rect"
+}
+
+# Detects pose landmarks within the specified region of interest of the image.
+node { + calculator: "PoseLandmarkByRoiGpu" + input_side_packet: "MODEL_COMPLEXITY:model_complexity" + input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + input_stream: "IMAGE:image" + input_stream: "ROI:pose_rect" + output_stream: "LANDMARKS:unfiltered_pose_landmarks" + output_stream: "AUXILIARY_LANDMARKS:unfiltered_auxiliary_landmarks" + output_stream: "WORLD_LANDMARKS:unfiltered_world_landmarks" + output_stream: "SEGMENTATION_MASK:unfiltered_segmentation_mask" +} + +# Smoothes landmarks to reduce jitter. +node { + calculator: "PoseLandmarkFiltering" + input_side_packet: "ENABLE:smooth_landmarks" + input_stream: "IMAGE_SIZE:image_size" + input_stream: "NORM_LANDMARKS:unfiltered_pose_landmarks" + input_stream: "AUX_NORM_LANDMARKS:unfiltered_auxiliary_landmarks" + input_stream: "WORLD_LANDMARKS:unfiltered_world_landmarks" + output_stream: "FILTERED_NORM_LANDMARKS:pose_landmarks" + output_stream: "FILTERED_AUX_NORM_LANDMARKS:auxiliary_landmarks" + output_stream: "FILTERED_WORLD_LANDMARKS:pose_world_landmarks" +} + +# Calculates region of interest based on the auxiliary landmarks, to be used in +# the subsequent image. +node { + calculator: "PoseLandmarksToRoi" + input_stream: "LANDMARKS:auxiliary_landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "ROI:pose_rect_from_landmarks" +} + +# Caches pose rects calculated from landmarks, and upon the arrival of the next +# input image, sends out the cached rects with timestamps replaced by that of +# the input image, essentially generating a packet that carries the previous +# pose rects. Note that upon the arrival of the very first input image, a +# timestamp bound update occurs to jump start the feedback loop. +node { + calculator: "PreviousLoopbackCalculator" + input_stream: "MAIN:image" + input_stream: "LOOP:pose_rect_from_landmarks" + input_stream_info: { + tag_index: "LOOP" + back_edge: true + } + output_stream: "PREV_LOOP:prev_pose_rect_from_landmarks" +} + +# Smoothes segmentation to reduce jitter. +node { + calculator: "PoseSegmentationFiltering" + input_side_packet: "ENABLE:smooth_segmentation" + input_stream: "SEGMENTATION_MASK:unfiltered_segmentation_mask" + output_stream: "FILTERED_SEGMENTATION_MASK:filtered_segmentation_mask" +} + +# Converts the incoming segmentation mask represented as an Image into the +# corresponding GpuBuffer type. +node: { + calculator: "FromImageCalculator" + input_stream: "IMAGE:filtered_segmentation_mask" + output_stream: "IMAGE_GPU:segmentation_mask" +} diff --git a/mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite b/mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite new file mode 100755 index 0000000..9b767e7 Binary files /dev/null and b/mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite differ diff --git a/mediapipe/modules/pose_landmark/pose_landmark_lite.tflite b/mediapipe/modules/pose_landmark/pose_landmark_lite.tflite new file mode 100755 index 0000000..280cc72 Binary files /dev/null and b/mediapipe/modules/pose_landmark/pose_landmark_lite.tflite differ diff --git a/mediapipe/modules/pose_landmark/pose_landmark_model_loader.pbtxt b/mediapipe/modules/pose_landmark/pose_landmark_model_loader.pbtxt new file mode 100644 index 0000000..ce7036e --- /dev/null +++ b/mediapipe/modules/pose_landmark/pose_landmark_model_loader.pbtxt @@ -0,0 +1,73 @@ +# MediaPipe graph to load a selected pose landmark TF Lite model. + +type: "PoseLandmarkModelLoader" + +# Complexity of the pose landmark model: 0, 1 or 2. 
Landmark accuracy as well as
+# inference latency generally go up with the model complexity. If unspecified,
+# functions as set to 1. (int)
+input_side_packet: "MODEL_COMPLEXITY:model_complexity"
+
+# TF Lite model represented as a FlatBuffer.
+# (std::unique_ptr<tflite::FlatBufferModel,
+#  std::function<void(tflite::FlatBufferModel*)>>)
+output_side_packet: "MODEL:model"
+
+# Determines path to the desired pose landmark model file.
+node {
+  calculator: "SwitchContainer"
+  input_side_packet: "SELECT:model_complexity"
+  output_side_packet: "PACKET:model_path"
+  options: {
+    [mediapipe.SwitchContainerOptions.ext] {
+      select: 1
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/pose_landmark/pose_landmark_lite.tflite"
+            }
+          }
+        }
+      }
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/pose_landmark/pose_landmark_full.tflite"
+            }
+          }
+        }
+      }
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite"
+            }
+          }
+        }
+      }
+    }
+  }
+}
+
+# Loads the file in the specified path into a blob.
+node {
+  calculator: "LocalFileContentsCalculator"
+  input_side_packet: "FILE_PATH:model_path"
+  output_side_packet: "CONTENTS:model_blob"
+  options: {
+    [mediapipe.LocalFileContentsCalculatorOptions.ext]: {
+      text_mode: false
+    }
+  }
+}
+
+# Converts the input blob into a TF Lite model.
+node {
+  calculator: "TfLiteModelCalculator"
+  input_side_packet: "MODEL_BLOB:model_blob"
+  output_side_packet: "MODEL:model"
+}
diff --git a/mediapipe/modules/pose_landmark/pose_landmark_topology.svg b/mediapipe/modules/pose_landmark/pose_landmark_topology.svg
new file mode 100644
index 0000000..a57269d
--- /dev/null
+++ b/mediapipe/modules/pose_landmark/pose_landmark_topology.svg
@@ -0,0 +1,651 @@
+[651 lines of SVG markup elided: a diagram of the 33-point pose landmark topology, labeling landmarks 0-32.]
diff --git a/mediapipe/modules/pose_landmark/pose_landmarks_and_segmentation_inverse_projection.pbtxt b/mediapipe/modules/pose_landmark/pose_landmarks_and_segmentation_inverse_projection.pbtxt
new file mode 100644
index 0000000..eec3b9b
--- /dev/null
+++ b/mediapipe/modules/pose_landmark/pose_landmarks_and_segmentation_inverse_projection.pbtxt
@@ -0,0 +1,125 @@
+# MediaPipe graph projecting the landmarks and segmentation mask defined in a
+# local coordinate system within a (potentially letterboxed) ROI back to the
+# global coordinate system of the full image that contains the ROI.
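The SwitchContainer in the model loader above is a three-way selector keyed on MODEL_COMPLEXITY, defaulting to select: 1 (the full model) when the side packet is absent. The same lookup as a plain Rust function:

```rust
/// Maps the MODEL_COMPLEXITY side packet (0, 1 or 2) to the bundled model
/// path, defaulting to the full model as the graph does when unspecified.
fn pose_landmark_model_path(complexity: Option<u8>) -> &'static str {
    match complexity.unwrap_or(1) {
        0 => "mediapipe/modules/pose_landmark/pose_landmark_lite.tflite",
        2 => "mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite",
        _ => "mediapipe/modules/pose_landmark/pose_landmark_full.tflite",
    }
}
```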
+#
+# EXAMPLE:
+# node {
+#   calculator: "PoseLandmarksAndSegmentationInverseProjection"
+#   input_stream: "IMAGE_SIZE:image_size"
+#   input_stream: "NORM_RECT:roi"
+#   input_stream: "LETTERBOX_PADDING:letterbox_padding"
+#   input_stream: "MATRIX:transformation_matrix"
+#   input_stream: "LANDMARKS:roi_landmarks"
+#   input_stream: "AUXILIARY_LANDMARKS:roi_auxiliary_landmarks"
+#   input_stream: "WORLD_LANDMARKS:roi_world_landmarks"
+#   input_stream: "SEGMENTATION_MASK:roi_segmentation_mask"
+#   output_stream: "LANDMARKS:landmarks"
+#   output_stream: "AUXILIARY_LANDMARKS:auxiliary_landmarks"
+#   output_stream: "WORLD_LANDMARKS:world_landmarks"
+#   output_stream: "SEGMENTATION_MASK:segmentation_mask"
+# }
+
+type: "PoseLandmarksAndSegmentationInverseProjection"
+
+# Size of the full image (width & height). (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+
+# ROI within the full image. (NormalizedRect)
+input_stream: "NORM_RECT:roi"
+
+# An array representing the letterbox padding around the ROI from the 4 sides:
+# [left, top, right, bottom]. The padding is normalized to [0.f, 1.f] by the
+# dimensions of the letterboxed/padded ROI. (std::array<float, 4>)
+input_stream: "LETTERBOX_PADDING:letterbox_padding"
+
+# An array representing a 4x4 row-major-order matrix that maps a point within
+# the ROI from the global coordinates of the full image to the local coordinates
+# within the letterboxed ROI. (std::array<float, 16>)
+input_stream: "MATRIX:transformation_matrix"
+
+# Input landmarks and segmentation mask in local coordinates within the
+# letterboxed ROI, and the corresponding outputs in global coordinates of the
+# full image.
+# LANDMARKS & AUXILIARY_LANDMARKS (NormalizedLandmarkList)
+# WORLD_LANDMARKS (LandmarkList)
+# SEGMENTATION_MASK (Image)
+input_stream: "LANDMARKS:roi_landmarks"
+input_stream: "AUXILIARY_LANDMARKS:roi_auxiliary_landmarks"
+input_stream: "WORLD_LANDMARKS:roi_world_landmarks"
+input_stream: "SEGMENTATION_MASK:roi_segmentation_mask"
+output_stream: "LANDMARKS:landmarks"
+output_stream: "AUXILIARY_LANDMARKS:auxiliary_landmarks"
+output_stream: "WORLD_LANDMARKS:world_landmarks"
+output_stream: "SEGMENTATION_MASK:segmentation_mask"
+
+# -----------------------------------------------------------------------------
+# LANDMARKS
+# -----------------------------------------------------------------------------
+
+# Adjusts landmarks (already normalized to [0.f, 1.f]) in the letterboxed ROI
+# to the corresponding coordinates with the letterbox removed.
+node {
+  calculator: "LandmarkLetterboxRemovalCalculator"
+  input_stream: "LANDMARKS:roi_landmarks"
+  input_stream: "LETTERBOX_PADDING:letterbox_padding"
+  output_stream: "LANDMARKS:adjusted_landmarks"
+}
+node {
+  calculator: "LandmarkLetterboxRemovalCalculator"
+  input_stream: "LANDMARKS:roi_auxiliary_landmarks"
+  input_stream: "LETTERBOX_PADDING:letterbox_padding"
+  output_stream: "LANDMARKS:adjusted_auxiliary_landmarks"
+}
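The two LandmarkLetterboxRemovalCalculator nodes above undo the LETTERBOX_PADDING recorded during tensor extraction: each normalized coordinate is shifted past the leading bar and rescaled by the unpadded extent. The underlying arithmetic, sketched in Rust:

```rust
/// Removes letterbox padding from a normalized landmark coordinate.
/// `padding` is [left, top, right, bottom], normalized to [0, 1] by the
/// padded ROI dimensions, matching the LETTERBOX_PADDING stream above.
fn remove_letterbox(x: f32, y: f32, padding: [f32; 4]) -> (f32, f32) {
    let [left, top, right, bottom] = padding;
    let adjusted_x = (x - left) / (1.0 - left - right);
    let adjusted_y = (y - top) / (1.0 - top - bottom);
    (adjusted_x, adjusted_y)
}
```

+# Projects the landmarks from the letterbox-removed ROI back to the full image.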
+node {
+  calculator: "LandmarkProjectionCalculator"
+  input_stream: "NORM_LANDMARKS:adjusted_landmarks"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "NORM_LANDMARKS:landmarks"
+}
+node {
+  calculator: "LandmarkProjectionCalculator"
+  input_stream: "NORM_LANDMARKS:adjusted_auxiliary_landmarks"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "NORM_LANDMARKS:auxiliary_landmarks"
+}
+
+# -----------------------------------------------------------------------------
+# WORLD_LANDMARKS
+# -----------------------------------------------------------------------------
+
+# Projects the world landmarks from the letterboxed ROI to the full image.
+node {
+  calculator: "WorldLandmarkProjectionCalculator"
+  input_stream: "LANDMARKS:roi_world_landmarks"
+  input_stream: "NORM_RECT:roi"
+  output_stream: "LANDMARKS:world_landmarks"
+}
+
+# -----------------------------------------------------------------------------
+# SEGMENTATION_MASK
+# -----------------------------------------------------------------------------
+
+# Calculates the inverse transformation matrix.
+node {
+  calculator: "InverseMatrixCalculator"
+  input_stream: "MATRIX:transformation_matrix"
+  output_stream: "MATRIX:inverse_transformation_matrix"
+}
+
+# Projects the segmentation mask from the letterboxed ROI back to the full
+# image.
+node {
+  calculator: "WarpAffineCalculator"
+  input_stream: "IMAGE:roi_segmentation_mask"
+  input_stream: "MATRIX:inverse_transformation_matrix"
+  input_stream: "OUTPUT_SIZE:image_size"
+  output_stream: "IMAGE:segmentation_mask"
+  options: {
+    [mediapipe.WarpAffineCalculatorOptions.ext] {
+      border_mode: BORDER_ZERO
+      gpu_origin: TOP_LEFT
+    }
+  }
+}
diff --git a/mediapipe/modules/pose_landmark/pose_landmarks_to_roi.pbtxt b/mediapipe/modules/pose_landmark/pose_landmarks_to_roi.pbtxt
new file mode 100644
index 0000000..b1fe0e3
--- /dev/null
+++ b/mediapipe/modules/pose_landmark/pose_landmarks_to_roi.pbtxt
@@ -0,0 +1,51 @@
+# MediaPipe graph to calculate pose region of interest (ROI) from landmarks
+# detected by "PoseLandmarkByRoiCpu" or "PoseLandmarkByRoiGpu".
+#
+# NOTE: this graph is subject to change and should not be used directly.
+
+type: "PoseLandmarksToRoi"
+
+# Normalized landmarks. (NormalizedLandmarkList)
+input_stream: "LANDMARKS:landmarks"
+# Image size (width & height). (std::pair<int, int>)
+input_stream: "IMAGE_SIZE:image_size"
+# ROI according to landmarks. (NormalizedRect)
+output_stream: "ROI:roi"
+
+# Converts landmarks to a detection that tightly encloses all landmarks.
+node {
+  calculator: "LandmarksToDetectionCalculator"
+  input_stream: "NORM_LANDMARKS:landmarks"
+  output_stream: "DETECTION:detection"
+}
+
+# Converts detection into a rectangle based on center and scale alignment
+# points.
+node {
+  calculator: "AlignmentPointsRectsCalculator"
+  input_stream: "DETECTION:detection"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "NORM_RECT:raw_roi"
+  options: {
+    [mediapipe.DetectionsToRectsCalculatorOptions.ext] {
+      rotation_vector_start_keypoint_index: 0
+      rotation_vector_end_keypoint_index: 1
+      rotation_vector_target_angle_degrees: 90
+    }
+  }
+}
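The RectTransformationCalculator below enlarges the tight landmark rect by 1.25x and squares it on the long side, reproducing the crop margin the model saw during training. Roughly, ignoring the pixel-space aspect correction driven by IMAGE_SIZE:

```rust
/// Normalized rect with center-based coordinates, standing in for
/// MediaPipe's NormalizedRect.
struct NormRect { x_center: f32, y_center: f32, width: f32, height: f32 }

/// Expands a rect the way RectTransformationCalculator below is configured:
/// scale both sides by 1.25, then square it on the long side.
fn expand_roi(r: &NormRect) -> NormRect {
    let w = r.width * 1.25;
    let h = r.height * 1.25;
    let side = w.max(h); // square_long: true
    NormRect { x_center: r.x_center, y_center: r.y_center, width: side, height: side }
}
```

+# Expands the pose rect with the margin used during training.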
+node {
+  calculator: "RectTransformationCalculator"
+  input_stream: "NORM_RECT:raw_roi"
+  input_stream: "IMAGE_SIZE:image_size"
+  output_stream: "roi"
+  options: {
+    [mediapipe.RectTransformationCalculatorOptions.ext] {
+      scale_x: 1.25
+      scale_y: 1.25
+      square_long: true
+    }
+  }
+}
diff --git a/mediapipe/modules/pose_landmark/pose_segmentation_filtering.pbtxt b/mediapipe/modules/pose_landmark/pose_segmentation_filtering.pbtxt
new file mode 100644
index 0000000..c3882ad
--- /dev/null
+++ b/mediapipe/modules/pose_landmark/pose_segmentation_filtering.pbtxt
@@ -0,0 +1,61 @@
+# MediaPipe graph to filter segmentation masks temporally (across packets with
+# incremental timestamps) to reduce jitter.
+#
+# EXAMPLE:
+# node {
+#   calculator: "PoseSegmentationFiltering"
+#   input_side_packet: "ENABLE:enable"
+#   input_stream: "SEGMENTATION_MASK:segmentation_mask"
+#   output_stream: "FILTERED_SEGMENTATION_MASK:filtered_segmentation_mask"
+# }
+
+type: "PoseSegmentationFiltering"
+
+# Whether to enable filtering. If unspecified, functions as enabled. (bool)
+input_side_packet: "ENABLE:enable"
+
+# Segmentation mask. (Image)
+input_stream: "SEGMENTATION_MASK:segmentation_mask"
+
+# Filtered segmentation mask. (Image)
+output_stream: "FILTERED_SEGMENTATION_MASK:filtered_segmentation_mask"
+
+# Drops the filtered segmentation mask from the previous frame if filtering is
+# not enabled. In that case, the downstream SegmentationSmoothingCalculator
+# becomes a simple passthrough.
+node {
+  calculator: "GateCalculator"
+  input_side_packet: "ALLOW:enable"
+  input_stream: "prev_filtered_segmentation_mask"
+  output_stream: "gated_prev_filtered_segmentation_mask"
+  options: {
+    [mediapipe.GateCalculatorOptions.ext] {
+      allow: true
+    }
+  }
+}
+
+# Smoothes segmentation to reduce jitter.
+node {
+  calculator: "SegmentationSmoothingCalculator"
+  input_stream: "MASK:segmentation_mask"
+  input_stream: "MASK_PREVIOUS:gated_prev_filtered_segmentation_mask"
+  output_stream: "MASK_SMOOTHED:filtered_segmentation_mask"
+  options {
+    [mediapipe.SegmentationSmoothingCalculatorOptions.ext] {
+      combine_with_previous_ratio: 0.7
+    }
+  }
+}
+
+# Caches the filtered segmentation mask, similar to above for the pose rect.
+node {
+  calculator: "PreviousLoopbackCalculator"
+  input_stream: "MAIN:segmentation_mask"
+  input_stream: "LOOP:filtered_segmentation_mask"
+  input_stream_info: {
+    tag_index: "LOOP"
+    back_edge: true
+  }
+  output_stream: "PREV_LOOP:prev_filtered_segmentation_mask"
+}
diff --git a/mediapipe/modules/pose_landmark/tensors_to_pose_landmarks_and_segmentation.pbtxt b/mediapipe/modules/pose_landmark/tensors_to_pose_landmarks_and_segmentation.pbtxt
new file mode 100644
index 0000000..ac86233
--- /dev/null
+++ b/mediapipe/modules/pose_landmark/tensors_to_pose_landmarks_and_segmentation.pbtxt
@@ -0,0 +1,265 @@
+# MediaPipe graph performing tensor post-processing to detect/predict pose
+# landmarks and segmentation mask.
+#
+# EXAMPLE:
+# node {
+#   calculator: "TensorsToPoseLandmarksAndSegmentation"
+#   input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation"
+#   input_stream: "TENSORS:tensors"
+#   output_stream: "LANDMARKS:landmarks"
+#   output_stream: "AUXILIARY_LANDMARKS:auxiliary_landmarks"
+#   output_stream: "WORLD_LANDMARKS:world_landmarks"
+#   output_stream: "SEGMENTATION_MASK:segmentation_mask"
+# }
+
+type: "TensorsToPoseLandmarksAndSegmentation"
+
+# Whether to predict the segmentation mask. If unspecified, functions as set to
+# false.
(bool)
+input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation"
+
+# Tensors from model inference of
+# "mediapipe/modules/pose_landmark/pose_landmark_lite|full|heavy.tflite".
+# (std::vector<Tensor>)
+# tensors[0]: landmarks
+# tensors[1]: pose flag
+# tensors[2]: segmentation
+# tensors[3]: heatmap
+# tensors[4]: world landmarks
+input_stream: "TENSORS:tensors"
+
+# Pose landmarks. (NormalizedLandmarkList)
+# We have 33 landmarks (see pose_landmark_topology.svg) and there are other
+# auxiliary key points.
+# 0 - nose
+# 1 - left eye (inner)
+# 2 - left eye
+# 3 - left eye (outer)
+# 4 - right eye (inner)
+# 5 - right eye
+# 6 - right eye (outer)
+# 7 - left ear
+# 8 - right ear
+# 9 - mouth (left)
+# 10 - mouth (right)
+# 11 - left shoulder
+# 12 - right shoulder
+# 13 - left elbow
+# 14 - right elbow
+# 15 - left wrist
+# 16 - right wrist
+# 17 - left pinky
+# 18 - right pinky
+# 19 - left index
+# 20 - right index
+# 21 - left thumb
+# 22 - right thumb
+# 23 - left hip
+# 24 - right hip
+# 25 - left knee
+# 26 - right knee
+# 27 - left ankle
+# 28 - right ankle
+# 29 - left heel
+# 30 - right heel
+# 31 - left foot index
+# 32 - right foot index
+#
+# NOTE: If a pose is not present, for this particular timestamp there will not
+# be an output packet in the LANDMARKS stream. However, the MediaPipe framework
+# will internally inform the downstream calculators of the absence of this
+# packet so that they don't wait for it unnecessarily.
+output_stream: "LANDMARKS:landmarks"
+# Auxiliary landmarks (e.g., for deriving the ROI in the subsequent image).
+# (NormalizedLandmarkList)
+output_stream: "AUXILIARY_LANDMARKS:auxiliary_landmarks"
+
+# Pose world landmarks. (LandmarkList)
+# World landmarks are real-world 3D coordinates in meters with the origin at the
+# center between hips. WORLD_LANDMARKS shares the same landmark topology as
+# LANDMARKS. However, LANDMARKS provides coordinates (in pixels) of a 3D object
+# projected onto the 2D image surface, while WORLD_LANDMARKS provides
+# coordinates (in meters) of the 3D object itself.
+output_stream: "WORLD_LANDMARKS:world_landmarks"
+
+# Segmentation mask. (Image)
+output_stream: "SEGMENTATION_MASK:segmentation_mask"
+
+# Splits a vector of tensors into multiple vectors according to the ranges
+# specified in the option.
+node {
+  calculator: "SplitTensorVectorCalculator"
+  input_stream: "tensors"
+  output_stream: "landmark_tensor"
+  output_stream: "pose_flag_tensor"
+  output_stream: "segmentation_tensor"
+  output_stream: "heatmap_tensor"
+  output_stream: "world_landmark_tensor"
+  options: {
+    [mediapipe.SplitVectorCalculatorOptions.ext] {
+      ranges: { begin: 0 end: 1 }
+      ranges: { begin: 1 end: 2 }
+      ranges: { begin: 2 end: 3 }
+      ranges: { begin: 3 end: 4 }
+      ranges: { begin: 4 end: 5 }
+    }
+  }
+}
+
+# Converts the pose-flag tensor into a float that represents the confidence
+# score of pose presence.
+node {
+  calculator: "TensorsToFloatsCalculator"
+  input_stream: "TENSORS:pose_flag_tensor"
+  output_stream: "FLOAT:pose_presence_score"
+}
+
+# Applies a threshold to the confidence score to determine whether a pose is
+# present.
+node {
+  calculator: "ThresholdingCalculator"
+  input_stream: "FLOAT:pose_presence_score"
+  output_stream: "FLAG:pose_presence"
+  options: {
+    [mediapipe.ThresholdingCalculatorOptions.ext] {
+      threshold: 0.5
+    }
+  }
+}
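Everything downstream hinges on the pose-presence gate above: the pose-flag tensor is decoded to a score, thresholded at 0.5, and on failure every tensor packet is dropped for that timestamp. A compact Rust view of that step; `Tensor` is a stand-in type, the 5-tensor layout follows the comments above, and the score is assumed already decoded:

```rust
/// Stand-in for MediaPipe's Tensor; the model emits five tensors per frame.
struct Tensor;

/// Named view of the tensors that survive the presence gate.
struct PoseTensors {
    landmarks: Tensor,
    segmentation: Tensor,
    heatmap: Tensor,
    world_landmarks: Tensor,
}

/// Mirrors SplitTensorVectorCalculator + ThresholdingCalculator above.
fn post_process(mut tensors: Vec<Tensor>, presence_score: f32) -> Option<PoseTensors> {
    assert_eq!(tensors.len(), 5);
    if presence_score < 0.5 {
        return None; // no output packet for this timestamp
    }
    let world_landmarks = tensors.pop()?; // tensors[4]
    let heatmap = tensors.pop()?;         // tensors[3]
    let segmentation = tensors.pop()?;    // tensors[2]
    let _pose_flag = tensors.pop()?;      // tensors[1], already decoded
    let landmarks = tensors.pop()?;       // tensors[0]
    Some(PoseTensors { landmarks, segmentation, heatmap, world_landmarks })
}
```

+# Drops input tensors if pose is not present.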
+node { + calculator: "GateCalculator" + input_stream: "landmark_tensor" + input_stream: "world_landmark_tensor" + input_stream: "segmentation_tensor" + input_stream: "heatmap_tensor" + input_stream: "ALLOW:pose_presence" + output_stream: "ensured_landmark_tensor" + output_stream: "ensured_world_landmark_tensor" + output_stream: "ensured_segmentation_tensor" + output_stream: "ensured_heatmap_tensor" +} + +# ----------------------------------------------------------------------------- +# LANDMARKS +# ----------------------------------------------------------------------------- + +# Decodes the landmark tensors into a vector of landmarks, where the landmark +# coordinates are normalized by the spatial dimensions of the tensor. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:ensured_landmark_tensor" + output_stream: "NORM_LANDMARKS:raw_landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 39 + input_image_width: 256 + input_image_height: 256 + visibility_activation: SIGMOID + presence_activation: SIGMOID + } + } +} + +# Refines landmarks with the heatmap tensor. +node { + calculator: "RefineLandmarksFromHeatmapCalculator" + input_stream: "NORM_LANDMARKS:raw_landmarks" + input_stream: "TENSORS:ensured_heatmap_tensor" + output_stream: "NORM_LANDMARKS:all_landmarks" + options: { + [mediapipe.RefineLandmarksFromHeatmapCalculatorOptions.ext] { + kernel_size: 7 + } + } +} + +# Splits the landmarks into two sets: the actual pose landmarks and the +# auxiliary landmarks. +node { + calculator: "SplitNormalizedLandmarkListCalculator" + input_stream: "all_landmarks" + output_stream: "landmarks" + output_stream: "auxiliary_landmarks" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 33 } + ranges: { begin: 33 end: 35 } + } + } +} + +# ----------------------------------------------------------------------------- +# WORLD_LANDMARKS +# ----------------------------------------------------------------------------- + +# Decodes the world-landmark tensors into a vector of world landmarks. +node { + calculator: "TensorsToLandmarksCalculator" + input_stream: "TENSORS:ensured_world_landmark_tensor" + output_stream: "LANDMARKS:all_world_landmarks" + options: { + [mediapipe.TensorsToLandmarksCalculatorOptions.ext] { + num_landmarks: 39 + } + } +} + +# Keeps only the actual world landmarks. +node { + calculator: "SplitLandmarkListCalculator" + input_stream: "all_world_landmarks" + output_stream: "world_landmarks_without_visibility" + options: { + [mediapipe.SplitVectorCalculatorOptions.ext] { + ranges: { begin: 0 end: 33 } + } + } +} + +# Reuses the visibility and presence field in pose landmarks for the world +# landmarks. +node { + calculator: "VisibilityCopyCalculator" + input_stream: "NORM_LANDMARKS_FROM:landmarks" + input_stream: "LANDMARKS_TO:world_landmarks_without_visibility" + output_stream: "LANDMARKS_TO:world_landmarks" + options: { + [mediapipe.VisibilityCopyCalculatorOptions.ext] { + copy_visibility: true + copy_presence: true + } + } +} + +# ----------------------------------------------------------------------------- +# SEGMENTATION_MASK +# ----------------------------------------------------------------------------- + +# Drops segmentation tensors if segmentation is not enabled. 
+node { + calculator: "GateCalculator" + input_side_packet: "ALLOW:enable_segmentation" + input_stream: "ensured_segmentation_tensor" + output_stream: "enabled_segmentation_tensor" + options: { + [mediapipe.GateCalculatorOptions.ext] { + allow: false + } + } +} + +# Decodes the segmentation tensor into a mask image with pixel values in [0, 1] +# (1 for person and 0 for background). +node { + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:enabled_segmentation_tensor" + output_stream: "MASK:segmentation_mask" + options: { + [mediapipe.TensorsToSegmentationCalculatorOptions.ext] { + activation: SIGMOID + gpu_origin: TOP_LEFT + } + } +} diff --git a/mediapipe/modules/selfie_segmentation/BUILD b/mediapipe/modules/selfie_segmentation/BUILD new file mode 100644 index 0000000..7fc271a --- /dev/null +++ b/mediapipe/modules/selfie_segmentation/BUILD @@ -0,0 +1,99 @@ +# Copyright 2021 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_graph.bzl", + "mediapipe_simple_subgraph", +) + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +mediapipe_simple_subgraph( + name = "selfie_segmentation_model_loader", + graph = "selfie_segmentation_model_loader.pbtxt", + register_as = "SelfieSegmentationModelLoader", + deps = [ + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/tflite:tflite_model_calculator", + "//mediapipe/calculators/util:local_file_contents_calculator", + "//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "selfie_segmentation_cpu", + graph = "selfie_segmentation_cpu.pbtxt", + register_as = "SelfieSegmentationCpu", + deps = [ + ":selfie_segmentation_model_loader", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_segmentation_calculator", + "//mediapipe/calculators/tflite:tflite_custom_op_resolver_calculator", + "//mediapipe/calculators/util:from_image_calculator", + "//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "selfie_segmentation_gpu", + graph = "selfie_segmentation_gpu.pbtxt", + register_as = "SelfieSegmentationGpu", + deps = [ + ":selfie_segmentation_model_loader", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_segmentation_calculator", + "//mediapipe/calculators/tflite:tflite_custom_op_resolver_calculator", + "//mediapipe/calculators/util:from_image_calculator", + "//mediapipe/framework/tool:switch_container", + ], +) + +mediapipe_simple_subgraph( + name = "selfie_segmentation_cpu_image", + graph = "selfie_segmentation_cpu_image.pbtxt", + register_as = "SelfieSegmentationCpuImage", + deps = 
[ + ":selfie_segmentation_cpu", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/util:from_image_calculator", + "//mediapipe/calculators/util:to_image_calculator", + ], +) + +mediapipe_simple_subgraph( + name = "selfie_segmentation_gpu_image", + graph = "selfie_segmentation_gpu_image.pbtxt", + register_as = "SelfieSegmentationGpuImage", + deps = [ + ":selfie_segmentation_gpu", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/util:from_image_calculator", + "//mediapipe/calculators/util:to_image_calculator", + ], +) + +exports_files( + srcs = [ + "selfie_segmentation.tflite", + "selfie_segmentation_landscape.tflite", + ], +) diff --git a/mediapipe/modules/selfie_segmentation/README.md b/mediapipe/modules/selfie_segmentation/README.md new file mode 100644 index 0000000..cd6c5e0 --- /dev/null +++ b/mediapipe/modules/selfie_segmentation/README.md @@ -0,0 +1,6 @@ +# selfie_segmentation + +Subgraphs|Details +:--- | :--- +[`SelfieSegmentationCpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation_cpu.pbtxt)| Segments the person from background in a selfie image. (CPU input, and inference is executed on CPU.) +[`SelfieSegmentationGpu`](https://github.com/google/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu.pbtxt)| Segments the person from background in a selfie image. (GPU input, and inference is executed on GPU.) diff --git a/mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite b/mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite new file mode 100644 index 0000000..374c072 Binary files /dev/null and b/mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite differ diff --git a/mediapipe/modules/selfie_segmentation/selfie_segmentation_cpu.pbtxt b/mediapipe/modules/selfie_segmentation/selfie_segmentation_cpu.pbtxt new file mode 100644 index 0000000..5918248 --- /dev/null +++ b/mediapipe/modules/selfie_segmentation/selfie_segmentation_cpu.pbtxt @@ -0,0 +1,132 @@ +# MediaPipe graph to perform selfie segmentation. (CPU input, and all processing +# and inference are also performed on CPU) +# +# It is required that "selfie_segmentation.tflite" or +# "selfie_segmentation_landscape.tflite" is available at +# "mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite" +# or +# "mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite" +# path respectively during execution, depending on the specification in the +# MODEL_SELECTION input side packet. +# +# EXAMPLE: +# node { +# calculator: "SelfieSegmentationCpu" +# input_side_packet: "MODEL_SELECTION:model_selection" +# input_stream: "IMAGE:image" +# output_stream: "SEGMENTATION_MASK:segmentation_mask" +# } + +type: "SelfieSegmentationCpu" + +# CPU image. (ImageFrame) +input_stream: "IMAGE:image" + +# An integer 0 or 1. Use 0 to select a general-purpose model (operating on a +# 256x256 tensor), and 1 to select a model (operating on a 256x144 tensor) more +# optimized for landscape images. If unspecified, functions as set to 0. (int) +input_side_packet: "MODEL_SELECTION:model_selection" + +# Segmentation mask. (ImageFrame in ImageFormat::VEC32F1) +output_stream: "SEGMENTATION_MASK:segmentation_mask" + +# Resizes the input image into a tensor with a dimension desired by the model. 
+node { + calculator: "SwitchContainer" + input_side_packet: "SELECT:model_selection" + input_stream: "IMAGE:image" + output_stream: "TENSORS:input_tensors" + options: { + [mediapipe.SwitchContainerOptions.ext] { + select: 0 + contained_node: { + calculator: "ImageToTensorCalculator" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 256 + output_tensor_height: 256 + keep_aspect_ratio: false + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + border_mode: BORDER_ZERO + } + } + } + contained_node: { + calculator: "ImageToTensorCalculator" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 256 + output_tensor_height: 144 + keep_aspect_ratio: false + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + border_mode: BORDER_ZERO + } + } + } + } + } +} + +# Generates a single side packet containing a TensorFlow Lite op resolver that +# supports custom ops needed by the model used in this graph. +node { + calculator: "TfLiteCustomOpResolverCalculator" + output_side_packet: "op_resolver" +} + +# Loads the selfie segmentation TF Lite model. +node { + calculator: "SelfieSegmentationModelLoader" + input_side_packet: "MODEL_SELECTION:model_selection" + output_side_packet: "MODEL:model" +} + +# Runs model inference on CPU. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:input_tensors" + output_stream: "TENSORS:output_tensors" + input_side_packet: "MODEL:model" + input_side_packet: "CUSTOM_OP_RESOLVER:op_resolver" + options: { + [mediapipe.InferenceCalculatorOptions.ext] { + delegate { + xnnpack {} + } + } + } +} + +# Retrieves the size of the input image. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_CPU:image" + output_stream: "SIZE:input_size" +} + +# Processes the output tensors into a segmentation mask that has the same size +# as the input image into the graph. +node { + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:output_tensors" + input_stream: "OUTPUT_SIZE:input_size" + output_stream: "MASK:mask_image" + options: { + [mediapipe.TensorsToSegmentationCalculatorOptions.ext] { + activation: NONE + } + } +} + +# Converts the incoming Image into the corresponding ImageFrame type. +node: { + calculator: "FromImageCalculator" + input_stream: "IMAGE:mask_image" + output_stream: "IMAGE_CPU:segmentation_mask" +} diff --git a/mediapipe/modules/selfie_segmentation/selfie_segmentation_cpu_image.pbtxt b/mediapipe/modules/selfie_segmentation/selfie_segmentation_cpu_image.pbtxt new file mode 100644 index 0000000..a35ff0e --- /dev/null +++ b/mediapipe/modules/selfie_segmentation/selfie_segmentation_cpu_image.pbtxt @@ -0,0 +1,67 @@ +# MediaPipe graph to perform selfie segmentation. + +type: "SelfieSegmentationCpuImage" + +# Input image. (Image) +input_stream: "IMAGE:image" + +# The throttled input image. (Image) +output_stream: "IMAGE:throttled_image" + +# An integer 0 or 1. Use 0 to select a general-purpose model (operating on a +# 256x256 tensor), and 1 to select a model (operating on a 256x144 tensor) more +# optimized for landscape images. If unspecified, functions as set to 0. (int) +input_side_packet: "MODEL_SELECTION:model_selection" + +# Segmentation mask. 
(Image)
+output_stream: "SEGMENTATION_MASK:segmentation_mask"
+
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "image"
+  input_stream: "FINISHED:segmentation_mask"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_image"
+  options: {
+    [mediapipe.FlowLimiterCalculatorOptions.ext] {
+      max_in_flight: 1
+      max_in_queue: 1
+    }
+  }
+}
+
+# Converts Image to ImageFrame for SelfieSegmentationCpu to consume.
+node {
+  calculator: "FromImageCalculator"
+  input_stream: "IMAGE:throttled_image"
+  output_stream: "IMAGE_CPU:raw_image_frame"
+  output_stream: "SOURCE_ON_GPU:is_gpu_image"
+}
+
+# TODO: Remove the extra flipping once adopting MlImage.
+# If the source images are on gpu, flip the data vertically before sending them
+# into SelfieSegmentationCpu. This may be needed because OpenGL represents
+# images assuming the image origin is at the bottom-left corner, whereas
+# MediaPipe in general assumes the image origin is at the top-left corner.
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE:raw_image_frame"
+  input_stream: "FLIP_VERTICALLY:is_gpu_image"
+  output_stream: "IMAGE:image_frame"
+}
+
+node {
+  calculator: "SelfieSegmentationCpu"
+  input_side_packet: "MODEL_SELECTION:model_selection"
+  input_stream: "IMAGE:image_frame"
+  output_stream: "SEGMENTATION_MASK:segmentation_mask_image_frame"
+}
+
+node {
+  calculator: "ToImageCalculator"
+  input_stream: "IMAGE_CPU:segmentation_mask_image_frame"
+  output_stream: "IMAGE:segmentation_mask"
+}
diff --git a/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu.pbtxt b/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu.pbtxt
new file mode 100644
index 0000000..5f9e55e
--- /dev/null
+++ b/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu.pbtxt
@@ -0,0 +1,133 @@
+# MediaPipe graph to perform selfie segmentation. (GPU input, and all processing
+# and inference are also performed on GPU)
+#
+# It is required that "selfie_segmentation.tflite" or
+# "selfie_segmentation_landscape.tflite" is available at
+# "mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite"
+# or
+# "mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite"
+# path respectively during execution, depending on the specification in the
+# MODEL_SELECTION input side packet.
+#
+# EXAMPLE:
+# node {
+#   calculator: "SelfieSegmentationGpu"
+#   input_side_packet: "MODEL_SELECTION:model_selection"
+#   input_stream: "IMAGE:image"
+#   output_stream: "SEGMENTATION_MASK:segmentation_mask"
+# }
+
+type: "SelfieSegmentationGpu"
+
+# GPU image. (GpuBuffer)
+input_stream: "IMAGE:image"
+
+# An integer 0 or 1. Use 0 to select a general-purpose model (operating on a
+# 256x256 tensor), and 1 to select a model (operating on a 256x144 tensor) more
+# optimized for landscape images. If unspecified, functions as set to 0. (int)
+input_side_packet: "MODEL_SELECTION:model_selection"
+
+# Segmentation mask. (GpuBuffer in RGBA, with the same mask values in R and A)
+output_stream: "SEGMENTATION_MASK:segmentation_mask"
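The FlowLimiterCalculator in the image wrapper above applies back-pressure to the camera stream: with max_in_flight and max_in_queue both set to 1, one frame is segmented at a time, at most one more waits, and a newer frame supersedes an older queued one. A minimal sketch of that policy:

```rust
/// Back-pressure policy of the FlowLimiterCalculator above:
/// at most one frame in flight, at most one queued, newest wins.
struct FlowLimiter<T> {
    in_flight: bool,
    queued: Option<T>,
}

impl<T> FlowLimiter<T> {
    /// Offers a new frame; returns it if it may be processed right away.
    fn offer(&mut self, frame: T) -> Option<T> {
        if !self.in_flight {
            self.in_flight = true;
            Some(frame) // admit immediately
        } else {
            self.queued = Some(frame); // replace any older queued frame
            None
        }
    }

    /// Called on the FINISHED back edge; releases the queued frame, if any.
    fn finished(&mut self) -> Option<T> {
        let next = self.queued.take();
        self.in_flight = next.is_some();
        next
    }
}
```

+# Resizes the input image into a tensor with a dimension desired by the model.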
+node { + calculator: "SwitchContainer" + input_side_packet: "SELECT:model_selection" + input_stream: "IMAGE_GPU:image" + output_stream: "TENSORS:input_tensors" + options: { + [mediapipe.SwitchContainerOptions.ext] { + select: 0 + contained_node: { + calculator: "ImageToTensorCalculator" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 256 + output_tensor_height: 256 + keep_aspect_ratio: false + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + border_mode: BORDER_ZERO + gpu_origin: TOP_LEFT + } + } + } + contained_node: { + calculator: "ImageToTensorCalculator" + options: { + [mediapipe.ImageToTensorCalculatorOptions.ext] { + output_tensor_width: 256 + output_tensor_height: 144 + keep_aspect_ratio: false + output_tensor_float_range { + min: 0.0 + max: 1.0 + } + border_mode: BORDER_ZERO + gpu_origin: TOP_LEFT + } + } + } + } + } +} + +# Generates a single side packet containing a TensorFlow Lite op resolver that +# supports custom ops needed by the model used in this graph. +node { + calculator: "TfLiteCustomOpResolverCalculator" + output_side_packet: "op_resolver" + options: { + [mediapipe.TfLiteCustomOpResolverCalculatorOptions.ext] { + use_gpu: true + } + } +} + +# Loads the selfie segmentation TF Lite model. +node { + calculator: "SelfieSegmentationModelLoader" + input_side_packet: "MODEL_SELECTION:model_selection" + output_side_packet: "MODEL:model" +} + +# Runs model inference on GPU. +node { + calculator: "InferenceCalculator" + input_stream: "TENSORS:input_tensors" + output_stream: "TENSORS:output_tensors" + input_side_packet: "MODEL:model" + input_side_packet: "CUSTOM_OP_RESOLVER:op_resolver" +} + +# Retrieves the size of the input image. +node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:image" + output_stream: "SIZE:input_size" +} + +# Processes the output tensors into a segmentation mask that has the same size +# as the input image into the graph. +node { + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:output_tensors" + input_stream: "OUTPUT_SIZE:input_size" + output_stream: "MASK:mask_image" + options: { + [mediapipe.TensorsToSegmentationCalculatorOptions.ext] { + activation: NONE + gpu_origin: TOP_LEFT + } + } +} + +# Converts the incoming Image into the corresponding GpuBuffer type. +node: { + calculator: "FromImageCalculator" + input_stream: "IMAGE:mask_image" + output_stream: "IMAGE_GPU:segmentation_mask" +} diff --git a/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu_image.pbtxt b/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu_image.pbtxt new file mode 100644 index 0000000..d5c0935 --- /dev/null +++ b/mediapipe/modules/selfie_segmentation/selfie_segmentation_gpu_image.pbtxt @@ -0,0 +1,67 @@ +# MediaPipe graph to perform selfie segmentation. + +type: "SelfieSegmentationGpuImage" + +# Input image. (Image) +input_stream: "IMAGE:image" + +# The throttled input image. (Image) +output_stream: "IMAGE:throttled_image" + +# An integer 0 or 1. Use 0 to select a general-purpose model (operating on a +# 256x256 tensor), and 1 to select a model (operating on a 256x144 tensor) more +# optimized for landscape images. If unspecified, functions as set to 0. (int) +input_side_packet: "MODEL_SELECTION:model_selection" + +# Segmentation mask. 
(Image)
+output_stream: "SEGMENTATION_MASK:segmentation_mask"
+
+node {
+  calculator: "FlowLimiterCalculator"
+  input_stream: "image"
+  input_stream: "FINISHED:segmentation_mask"
+  input_stream_info: {
+    tag_index: "FINISHED"
+    back_edge: true
+  }
+  output_stream: "throttled_image"
+  options: {
+    [mediapipe.FlowLimiterCalculatorOptions.ext] {
+      max_in_flight: 1
+      max_in_queue: 1
+    }
+  }
+}
+
+# Converts Image to GpuBuffer for SelfieSegmentationGpu to consume.
+node {
+  calculator: "FromImageCalculator"
+  input_stream: "IMAGE:throttled_image"
+  output_stream: "IMAGE_GPU:raw_gpu_buffer"
+  output_stream: "SOURCE_ON_GPU:is_gpu_image"
+}
+
+# TODO: Remove the extra flipping once adopting MlImage.
+# If the source images are on GPU, flip the data vertically before sending them
+# into SelfieSegmentationGpu. This may be needed because OpenGL represents
+# images assuming the image origin is at the bottom-left corner, whereas
+# MediaPipe in general assumes the image origin is at the top-left corner.
+node: {
+  calculator: "ImageTransformationCalculator"
+  input_stream: "IMAGE_GPU:raw_gpu_buffer"
+  input_stream: "FLIP_VERTICALLY:is_gpu_image"
+  output_stream: "IMAGE_GPU:gpu_buffer"
+}
+
+node {
+  calculator: "SelfieSegmentationGpu"
+  input_side_packet: "MODEL_SELECTION:model_selection"
+  input_stream: "IMAGE:gpu_buffer"
+  output_stream: "SEGMENTATION_MASK:segmentation_mask_gpu_buffer"
+}
+
+node {
+  calculator: "ToImageCalculator"
+  input_stream: "IMAGE_GPU:segmentation_mask_gpu_buffer"
+  output_stream: "IMAGE:segmentation_mask"
+}
diff --git a/mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite b/mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite
new file mode 100755
index 0000000..4ea3f8a
Binary files /dev/null and b/mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite differ
diff --git a/mediapipe/modules/selfie_segmentation/selfie_segmentation_model_loader.pbtxt b/mediapipe/modules/selfie_segmentation/selfie_segmentation_model_loader.pbtxt
new file mode 100644
index 0000000..39495f8
--- /dev/null
+++ b/mediapipe/modules/selfie_segmentation/selfie_segmentation_model_loader.pbtxt
@@ -0,0 +1,63 @@
+# MediaPipe graph to load a selected selfie segmentation TF Lite model.
+
+type: "SelfieSegmentationModelLoader"
+
+# An integer 0 or 1. Use 0 to select a general-purpose model (operating on a
+# 256x256 tensor), and 1 to select a model (operating on a 256x144 tensor) more
+# optimized for landscape images. If unspecified, functions as set to 0. (int)
+input_side_packet: "MODEL_SELECTION:model_selection"
+
+# TF Lite model represented as a FlatBuffer.
+# (std::unique_ptr<tflite::FlatBufferModel, std::function<void(tflite::FlatBufferModel*)>>)
+output_side_packet: "MODEL:model"
+
+# Determines the path to the desired selfie segmentation model file.
+node {
+  calculator: "SwitchContainer"
+  input_side_packet: "SELECT:model_selection"
+  output_side_packet: "PACKET:model_path"
+  options: {
+    [mediapipe.SwitchContainerOptions.ext] {
+      select: 0
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite"
+            }
+          }
+        }
+      }
+      contained_node: {
+        calculator: "ConstantSidePacketCalculator"
+        options: {
+          [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
+            packet {
+              string_value: "mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite"
+            }
+          }
+        }
+      }
+    }
+  }
+}
+
+# Loads the file in the specified path into a blob.
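+#
+# The two nodes that follow implement the load: LocalFileContentsCalculator
+# reads the raw bytes (text_mode: false, since .tflite files are binary
+# FlatBuffers) and TfLiteModelCalculator wraps them in the MODEL side packet.
+# For reference, a parent graph consumes this loader the same way the
+# SelfieSegmentationCpu/Gpu graphs in this module do (sketch):
+#
+#   node {
+#     calculator: "SelfieSegmentationModelLoader"
+#     input_side_packet: "MODEL_SELECTION:model_selection"
+#     output_side_packet: "MODEL:model"
+#   }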
+node { + calculator: "LocalFileContentsCalculator" + input_side_packet: "FILE_PATH:model_path" + output_side_packet: "CONTENTS:model_blob" + options: { + [mediapipe.LocalFileContentsCalculatorOptions.ext]: { + text_mode: false + } + } +} + +# Converts the input blob into a TF Lite model. +node { + calculator: "TfLiteModelCalculator" + input_side_packet: "MODEL_BLOB:model_blob" + output_side_packet: "MODEL:model" +} diff --git a/src/bindings.rs b/src/bindings.rs new file mode 100644 index 0000000..0914443 --- /dev/null +++ b/src/bindings.rs @@ -0,0 +1,4226 @@ +/* automatically generated by rust-bindgen 0.59.2 */ + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct std_allocator { + pub _address: u8, +} +pub type std_allocator_value_type = u8; +pub type std_allocator_size_type = u64; +pub type std_allocator_difference_type = u64; +pub type std_allocator_pointer = u8; +pub type std_allocator_const_pointer = u8; +pub type std_allocator_reference = u8; +pub type std_allocator_const_reference = u8; +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct std_allocator_rebind { + pub _address: u8, +} +pub type std_allocator_rebind_other = u8; +pub type std_allocator_propagate_on_container_move_assignment = u8; +pub type std_allocator_is_always_equal = u8; +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct std_vector { + pub _address: u8, +} +pub type std_vector__Base = u8; +pub type std_vector__Tp_alloc_type = u8; +pub type std_vector__Alloc_traits = u8; +pub type std_vector_value_type = u8; +pub type std_vector_pointer = u8; +pub type std_vector_const_pointer = u8; +pub type std_vector_reference = u8; +pub type std_vector_const_reference = u8; +pub type std_vector_iterator = u8; +pub type std_vector_const_iterator = u8; +pub type std_vector_const_reverse_iterator = u8; +pub type std_vector_reverse_iterator = u8; +pub type std_vector_size_type = u64; +pub type std_vector_difference_type = u64; +pub type std_vector_allocator_type = u8; +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct std_vector__Temporary_value { + pub _address: u8, +} +pub type size_t = ::std::os::raw::c_ulong; +pub type uchar = ::std::os::raw::c_uchar; +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct cv_cuda_GpuMat { + _unused: [u8; 0], +} +#[doc = " @brief Template class for 2D points specified by its coordinates `x` and `y`."] +#[doc = ""] +#[doc = "An instance of the class is interchangeable with C structures, CvPoint and CvPoint2D32f . There is"] +#[doc = "also a cast operator to convert point coordinates to the specified type. The conversion from"] +#[doc = "floating-point coordinates to integer coordinates is done by rounding. Commonly, the conversion"] +#[doc = "uses this operation for each of the coordinates. 
Besides the class members listed in the"]
+#[doc = "declaration above, the following operations on points are implemented:"]
+#[doc = "@code"]
+#[doc = "pt1 = pt2 + pt3;"]
+#[doc = "pt1 = pt2 - pt3;"]
+#[doc = "pt1 = pt2 * a;"]
+#[doc = "pt1 = a * pt2;"]
+#[doc = "pt1 = pt2 / a;"]
+#[doc = "pt1 += pt2;"]
+#[doc = "pt1 -= pt2;"]
+#[doc = "pt1 *= a;"]
+#[doc = "pt1 /= a;"]
+#[doc = "double value = norm(pt); // L2 norm"]
+#[doc = "pt1 == pt2;"]
+#[doc = "pt1 != pt2;"]
+#[doc = "@endcode"]
+#[doc = "For your convenience, the following type aliases are defined:"]
+#[doc = "@code"]
+#[doc = "typedef Point_<int> Point2i;"]
+#[doc = "typedef Point2i Point;"]
+#[doc = "typedef Point_<float> Point2f;"]
+#[doc = "typedef Point_<double> Point2d;"]
+#[doc = "@endcode"]
+#[doc = "Example:"]
+#[doc = "@code"]
+#[doc = "Point2f a(0.3f, 0.f), b(0.f, 0.4f);"]
+#[doc = "Point pt = (a + b)*10.f;"]
+#[doc = "cout << pt.x << \", \" << pt.y << endl;"]
+#[doc = "@endcode"]
+pub type cv_Point = [u32; 2usize];
+#[doc = " @brief Template class for specifying the size of an image or rectangle."]
+#[doc = ""]
+#[doc = "The class includes two members called width and height. The structure can be converted to and from"]
+#[doc = "the old OpenCV structures CvSize and CvSize2D32f . The same set of arithmetic and comparison"]
+#[doc = "operations as for Point_ is available."]
+#[doc = ""]
+#[doc = "OpenCV defines the following Size_\\<\\> aliases:"]
+#[doc = "@code"]
+#[doc = "typedef Size_<int> Size2i;"]
+#[doc = "typedef Size2i Size;"]
+#[doc = "typedef Size_<float> Size2f;"]
+#[doc = "@endcode"]
+pub type cv_Size = [u32; 2usize];
+#[doc = " @brief Template class for 2D rectangles"]
+#[doc = ""]
+#[doc = "described by the following parameters:"]
+#[doc = "- Coordinates of the top-left corner. This is a default interpretation of Rect_::x and Rect_::y"]
+#[doc = "in OpenCV. Though, in your algorithms you may count x and y from the bottom-left corner."]
+#[doc = "- Rectangle width and height."]
+#[doc = ""]
+#[doc = "OpenCV typically assumes that the top and left boundary of the rectangle are inclusive, while the"]
+#[doc = "right and bottom boundaries are not. 
For example, the method Rect_::contains returns true if"] +#[doc = ""] +#[doc = "\\f[x \\leq pt.x < x+width,"] +#[doc = "y \\leq pt.y < y+height\\f]"] +#[doc = ""] +#[doc = "Virtually every loop over an image ROI in OpenCV (where ROI is specified by Rect_\\ ) is"] +#[doc = "implemented as:"] +#[doc = "@code"] +#[doc = "for(int y = roi.y; y < roi.y + roi.height; y++)"] +#[doc = "for(int x = roi.x; x < roi.x + roi.width; x++)"] +#[doc = "{"] +#[doc = "}"] +#[doc = "@endcode"] +#[doc = "In addition to the class members, the following operations on rectangles are implemented:"] +#[doc = "- \\f$\\texttt{rect} = \\texttt{rect} \\pm \\texttt{point}\\f$ (shifting a rectangle by a certain offset)"] +#[doc = "- \\f$\\texttt{rect} = \\texttt{rect} \\pm \\texttt{size}\\f$ (expanding or shrinking a rectangle by a"] +#[doc = "certain amount)"] +#[doc = "- rect += point, rect -= point, rect += size, rect -= size (augmenting operations)"] +#[doc = "- rect = rect1 & rect2 (rectangle intersection)"] +#[doc = "- rect = rect1 | rect2 (minimum area rectangle containing rect1 and rect2 )"] +#[doc = "- rect &= rect1, rect |= rect1 (and the corresponding augmenting operations)"] +#[doc = "- rect == rect1, rect != rect1 (rectangle comparison)"] +#[doc = ""] +#[doc = "This is an example how the partial ordering on rectangles can be established (rect1 \\f$\\subseteq\\f$"] +#[doc = "rect2):"] +#[doc = "@code"] +#[doc = "template inline bool"] +#[doc = "operator <= (const Rect_<_Tp>& r1, const Rect_<_Tp>& r2)"] +#[doc = "{"] +#[doc = "return (r1 & r2) == r1;"] +#[doc = "}"] +#[doc = "@endcode"] +#[doc = "For your convenience, the Rect_\\<\\> alias is available: cv::Rect"] +pub type cv_Rect = [u32; 4usize]; +#[doc = " @brief Template class specifying a continuous subsequence (slice) of a sequence."] +#[doc = ""] +#[doc = "The class is used to specify a row or a column span in a matrix ( Mat ) and for many other purposes."] +#[doc = "Range(a,b) is basically the same as a:b in Matlab or a..b in Python. As in Python, start is an"] +#[doc = "inclusive left boundary of the range and end is an exclusive right boundary of the range. Such a"] +#[doc = "half-opened interval is usually denoted as \\f$[start,end)\\f$ ."] +#[doc = ""] +#[doc = "The static method Range::all() returns a special variable that means \"the whole sequence\" or \"the"] +#[doc = "whole range\", just like \" : \" in Matlab or \" ... \" in Python. All the methods and functions in"] +#[doc = "OpenCV that take Range support this special Range::all() value. 
But, of course, in case of your own"] +#[doc = "custom processing, you will probably have to check and handle it explicitly:"] +#[doc = "@code"] +#[doc = "void my_function(..., const Range& r, ....)"] +#[doc = "{"] +#[doc = "if(r == Range::all()) {"] +#[doc = "}"] +#[doc = "else {"] +#[doc = "}"] +#[doc = "}"] +#[doc = "@endcode"] +#[repr(C)] +#[repr(align(4))] +#[derive(Debug, Copy, Clone)] +pub struct cv_Range { + pub _bindgen_opaque_blob: [u32; 2usize], +} +#[test] +fn bindgen_test_layout_cv_Range() { + assert_eq!( + ::std::mem::size_of::(), + 8usize, + concat!("Size of: ", stringify!(cv_Range)) + ); + assert_eq!( + ::std::mem::align_of::(), + 4usize, + concat!("Alignment of ", stringify!(cv_Range)) + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5Range4sizeEv"] + pub fn cv_Range_size(this: *const cv_Range) -> ::std::os::raw::c_int; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5Range5emptyEv"] + pub fn cv_Range_empty(this: *const cv_Range) -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv5Range3allEv"] + pub fn cv_Range_all() -> cv_Range; +} +extern "C" { + #[doc = " Range /////////////////////////////////"] + #[link_name = "\u{1}_ZN2cv5RangeC1Ev"] + pub fn cv_Range_Range(this: *mut cv_Range); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv5RangeC1Eii"] + pub fn cv_Range_Range1( + this: *mut cv_Range, + _start: ::std::os::raw::c_int, + _end: ::std::os::raw::c_int, + ); +} +impl cv_Range { + #[inline] + pub unsafe fn size(&self) -> ::std::os::raw::c_int { + cv_Range_size(self) + } + #[inline] + pub unsafe fn empty(&self) -> bool { + cv_Range_empty(self) + } + #[inline] + pub unsafe fn all() -> cv_Range { + cv_Range_all() + } + #[inline] + pub unsafe fn new() -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Range_Range(__bindgen_tmp.as_mut_ptr()); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new1(_start: ::std::os::raw::c_int, _end: ::std::os::raw::c_int) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Range_Range1(__bindgen_tmp.as_mut_ptr(), _start, _end); + __bindgen_tmp.assume_init() + } +} +#[doc = " @brief Template class for a 4-element vector derived from Vec."] +#[doc = ""] +#[doc = "Being derived from Vec\\<_Tp, 4\\> , Scalar\\_ and Scalar can be used just as typical 4-element"] +#[doc = "vectors. In addition, they can be converted to/from CvScalar . The type Scalar is widely used in"] +#[doc = "OpenCV to pass pixel values."] +pub type cv_Scalar = [u64; 4usize]; +#[doc = "! @addtogroup core"] +#[doc = "! @{"] +#[repr(C)] +#[repr(align(8))] +#[derive(Debug, Copy, Clone)] +pub struct cv_BufferPoolController { + pub _bindgen_opaque_blob: u64, +} +#[test] +fn bindgen_test_layout_cv_BufferPoolController() { + assert_eq!( + ::std::mem::size_of::(), + 8usize, + concat!("Size of: ", stringify!(cv_BufferPoolController)) + ); + assert_eq!( + ::std::mem::align_of::(), + 8usize, + concat!("Alignment of ", stringify!(cv_BufferPoolController)) + ); +} +pub const cv_AccessFlag_ACCESS_READ: cv_AccessFlag = 16777216; +pub const cv_AccessFlag_ACCESS_WRITE: cv_AccessFlag = 33554432; +pub const cv_AccessFlag_ACCESS_RW: cv_AccessFlag = 50331648; +pub const cv_AccessFlag_ACCESS_MASK: cv_AccessFlag = 50331648; +pub const cv_AccessFlag_ACCESS_FAST: cv_AccessFlag = 67108864; +#[doc = "! @addtogroup core_basic"] +#[doc = "! 
@{"] +pub type cv_AccessFlag = ::std::os::raw::c_uint; +pub type cv_InputArray = [u64; 3usize]; +pub type cv_OutputArray = [u64; 3usize]; +pub const cv_UMatUsageFlags_USAGE_DEFAULT: cv_UMatUsageFlags = 0; +pub const cv_UMatUsageFlags_USAGE_ALLOCATE_HOST_MEMORY: cv_UMatUsageFlags = 1; +pub const cv_UMatUsageFlags_USAGE_ALLOCATE_DEVICE_MEMORY: cv_UMatUsageFlags = 2; +pub const cv_UMatUsageFlags_USAGE_ALLOCATE_SHARED_MEMORY: cv_UMatUsageFlags = 4; +pub const cv_UMatUsageFlags___UMAT_USAGE_FLAGS_32BIT: cv_UMatUsageFlags = 2147483647; +#[doc = "! Usage flags for allocator"] +pub type cv_UMatUsageFlags = ::std::os::raw::c_uint; +#[doc = " @brief Custom array allocator"] +#[repr(C)] +#[repr(align(8))] +#[derive(Debug, Copy, Clone)] +pub struct cv_MatAllocator { + pub _bindgen_opaque_blob: u64, +} +#[test] +fn bindgen_test_layout_cv_MatAllocator() { + assert_eq!( + ::std::mem::size_of::(), + 8usize, + concat!("Size of: ", stringify!(cv_MatAllocator)) + ); + assert_eq!( + ::std::mem::align_of::(), + 8usize, + concat!("Alignment of ", stringify!(cv_MatAllocator)) + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv12MatAllocator3mapEPNS_8UMatDataENS_10AccessFlagE"] + pub fn cv_MatAllocator_map( + this: *mut ::std::os::raw::c_void, + data: *mut cv_UMatData, + accessflags: cv_AccessFlag, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv12MatAllocator5unmapEPNS_8UMatDataE"] + pub fn cv_MatAllocator_unmap(this: *mut ::std::os::raw::c_void, data: *mut cv_UMatData); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv12MatAllocator8downloadEPNS_8UMatDataEPviPKmS5_S5_S5_"] + pub fn cv_MatAllocator_download( + this: *mut ::std::os::raw::c_void, + data: *mut cv_UMatData, + dst: *mut ::std::os::raw::c_void, + dims: ::std::os::raw::c_int, + sz: *const size_t, + srcofs: *const size_t, + srcstep: *const size_t, + dststep: *const size_t, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv12MatAllocator6uploadEPNS_8UMatDataEPKviPKmS6_S6_S6_"] + pub fn cv_MatAllocator_upload( + this: *mut ::std::os::raw::c_void, + data: *mut cv_UMatData, + src: *const ::std::os::raw::c_void, + dims: ::std::os::raw::c_int, + sz: *const size_t, + dstofs: *const size_t, + dststep: *const size_t, + srcstep: *const size_t, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv12MatAllocator4copyEPNS_8UMatDataES2_iPKmS4_S4_S4_S4_b"] + pub fn cv_MatAllocator_copy( + this: *mut ::std::os::raw::c_void, + srcdata: *mut cv_UMatData, + dstdata: *mut cv_UMatData, + dims: ::std::os::raw::c_int, + sz: *const size_t, + srcofs: *const size_t, + srcstep: *const size_t, + dstofs: *const size_t, + dststep: *const size_t, + sync: bool, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv12MatAllocator23getBufferPoolControllerEPKc"] + pub fn cv_MatAllocator_getBufferPoolController( + this: *mut ::std::os::raw::c_void, + id: *const ::std::os::raw::c_char, + ) -> *mut cv_BufferPoolController; +} +#[repr(C)] +#[repr(align(8))] +#[derive(Debug, Copy, Clone)] +pub struct cv_UMatData { + pub _bindgen_opaque_blob: [u64; 13usize], +} +pub const cv_UMatData_MemoryFlag_COPY_ON_MAP: cv_UMatData_MemoryFlag = 1; +pub const cv_UMatData_MemoryFlag_HOST_COPY_OBSOLETE: cv_UMatData_MemoryFlag = 2; +pub const cv_UMatData_MemoryFlag_DEVICE_COPY_OBSOLETE: cv_UMatData_MemoryFlag = 4; +pub const cv_UMatData_MemoryFlag_TEMP_UMAT: cv_UMatData_MemoryFlag = 8; +pub const cv_UMatData_MemoryFlag_TEMP_COPIED_UMAT: cv_UMatData_MemoryFlag = 24; +pub const cv_UMatData_MemoryFlag_USER_ALLOCATED: cv_UMatData_MemoryFlag = 32; +pub const cv_UMatData_MemoryFlag_DEVICE_MEM_MAPPED: 
cv_UMatData_MemoryFlag = 64; +pub const cv_UMatData_MemoryFlag_ASYNC_CLEANUP: cv_UMatData_MemoryFlag = 128; +pub type cv_UMatData_MemoryFlag = ::std::os::raw::c_uint; +#[test] +fn bindgen_test_layout_cv_UMatData() { + assert_eq!( + ::std::mem::size_of::(), + 104usize, + concat!("Size of: ", stringify!(cv_UMatData)) + ); + assert_eq!( + ::std::mem::align_of::(), + 8usize, + concat!("Alignment of ", stringify!(cv_UMatData)) + ); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv8UMatData4lockEv"] + pub fn cv_UMatData_lock(this: *mut cv_UMatData); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv8UMatData6unlockEv"] + pub fn cv_UMatData_unlock(this: *mut cv_UMatData); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv8UMatData16hostCopyObsoleteEv"] + pub fn cv_UMatData_hostCopyObsolete(this: *const cv_UMatData) -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv8UMatData18deviceCopyObsoleteEv"] + pub fn cv_UMatData_deviceCopyObsolete(this: *const cv_UMatData) -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv8UMatData15deviceMemMappedEv"] + pub fn cv_UMatData_deviceMemMapped(this: *const cv_UMatData) -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv8UMatData9copyOnMapEv"] + pub fn cv_UMatData_copyOnMap(this: *const cv_UMatData) -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv8UMatData8tempUMatEv"] + pub fn cv_UMatData_tempUMat(this: *const cv_UMatData) -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv8UMatData14tempCopiedUMatEv"] + pub fn cv_UMatData_tempCopiedUMat(this: *const cv_UMatData) -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv8UMatData20markHostCopyObsoleteEb"] + pub fn cv_UMatData_markHostCopyObsolete(this: *mut cv_UMatData, flag: bool); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv8UMatData22markDeviceCopyObsoleteEb"] + pub fn cv_UMatData_markDeviceCopyObsolete(this: *mut cv_UMatData, flag: bool); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv8UMatData19markDeviceMemMappedEb"] + pub fn cv_UMatData_markDeviceMemMapped(this: *mut cv_UMatData, flag: bool); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv8UMatDataC1EPKNS_12MatAllocatorE"] + pub fn cv_UMatData_UMatData(this: *mut cv_UMatData, allocator: *const cv_MatAllocator); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv8UMatDataD1Ev"] + pub fn cv_UMatData_UMatData_destructor(this: *mut cv_UMatData); +} +impl cv_UMatData { + #[inline] + pub unsafe fn lock(&mut self) { + cv_UMatData_lock(self) + } + #[inline] + pub unsafe fn unlock(&mut self) { + cv_UMatData_unlock(self) + } + #[inline] + pub unsafe fn hostCopyObsolete(&self) -> bool { + cv_UMatData_hostCopyObsolete(self) + } + #[inline] + pub unsafe fn deviceCopyObsolete(&self) -> bool { + cv_UMatData_deviceCopyObsolete(self) + } + #[inline] + pub unsafe fn deviceMemMapped(&self) -> bool { + cv_UMatData_deviceMemMapped(self) + } + #[inline] + pub unsafe fn copyOnMap(&self) -> bool { + cv_UMatData_copyOnMap(self) + } + #[inline] + pub unsafe fn tempUMat(&self) -> bool { + cv_UMatData_tempUMat(self) + } + #[inline] + pub unsafe fn tempCopiedUMat(&self) -> bool { + cv_UMatData_tempCopiedUMat(self) + } + #[inline] + pub unsafe fn markHostCopyObsolete(&mut self, flag: bool) { + cv_UMatData_markHostCopyObsolete(self, flag) + } + #[inline] + pub unsafe fn markDeviceCopyObsolete(&mut self, flag: bool) { + cv_UMatData_markDeviceCopyObsolete(self, flag) + } + #[inline] + pub unsafe fn markDeviceMemMapped(&mut self, flag: bool) { + cv_UMatData_markDeviceMemMapped(self, flag) + } + #[inline] + pub unsafe fn new(allocator: *const cv_MatAllocator) -> Self { + let mut 
__bindgen_tmp = ::std::mem::MaybeUninit::uninit();
+        cv_UMatData_UMatData(__bindgen_tmp.as_mut_ptr(), allocator);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn destruct(&mut self) {
+        cv_UMatData_UMatData_destructor(self)
+    }
+}
+#[doc = " @brief n-dimensional dense array class \\anchor CVMat_Details"]
+#[doc = ""]
+#[doc = "The class Mat represents an n-dimensional dense numerical single-channel or multi-channel array. It"]
+#[doc = "can be used to store real or complex-valued vectors and matrices, grayscale or color images, voxel"]
+#[doc = "volumes, vector fields, point clouds, tensors, histograms (though, very high-dimensional histograms"]
+#[doc = "may be better stored in a SparseMat ). The data layout of the array `M` is defined by the array"]
+#[doc = "`M.step[]`, so that the address of element \\f$(i_0,...,i_{M.dims-1})\\f$, where \\f$0\\leq i_k<M.size[i]\\f$,"]
+#[doc = "is computed as:"]
+#[doc = "\\f[addr(M_{i_0,...,i_{M.dims-1}}) = M.data + M.step[0]*i_0 + M.step[1]*i_1 + ... + M.step[M.dims-1]*i_{M.dims-1}\\f]"]
+#[repr(C)]
+#[repr(align(8))]
+#[derive(Debug, Copy, Clone)]
+pub struct cv_Mat {
+    pub _bindgen_opaque_blob: [u64; 12usize],
+}
+#[test]
+fn bindgen_test_layout_cv_Mat() {
+    assert_eq!(
+        ::std::mem::size_of::<cv_Mat>(),
+        96usize,
+        concat!("Size of: ", stringify!(cv_Mat))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<cv_Mat>(),
+        8usize,
+        concat!("Alignment of ", stringify!(cv_Mat))
+    );
+}
+extern "C" {
+    #[doc = "! retrieve UMat from Mat"]
+    #[link_name = "\u{1}_ZNK2cv3Mat7getUMatENS_10AccessFlagENS_14UMatUsageFlagsE"]
+    pub fn cv_Mat_getUMat(
+        this: *const cv_Mat,
+        accessFlags: cv_AccessFlag,
+        usageFlags: cv_UMatUsageFlags,
+    ) -> cv_UMat;
+}
+extern "C" {
+    #[doc = " @brief Creates a matrix header for the specified matrix row."]
+    #[doc = ""]
+    #[doc = "The method makes a new header for the specified matrix row and returns it. This is an O(1)"]
+    #[doc = "operation, regardless of the matrix size. The underlying data of the new matrix is shared with the"]
+    #[doc = "original matrix. Here is the example of one of the classical basic matrix processing operations,"]
+    #[doc = "axpy, used by LU and many other algorithms:"]
+    #[doc = "@code"]
+    #[doc = "inline void matrix_axpy(Mat& A, int i, int j, double alpha)"]
+    #[doc = "{"]
+    #[doc = "A.row(i) += A.row(j)*alpha;"]
+    #[doc = "}"]
+    #[doc = "@endcode"]
+    #[doc = "@note In the current implementation, the following code does not work as expected:"]
+    #[doc = "@code"]
+    #[doc = "Mat A;"]
+    #[doc = "..."]
+    #[doc = "A.row(i) = A.row(j); // will not work"]
+    #[doc = "@endcode"]
+    #[doc = "This happens because A.row(i) forms a temporary header that is further assigned to another header."]
+    #[doc = "Remember that each of these operations is O(1), that is, no data is copied. Thus, the above"]
+    #[doc = "assignment is not true if you may have expected the j-th row to be copied to the i-th row. To"]
+    #[doc = "achieve that, you should either turn this simple assignment into an expression or use the"]
+    #[doc = "Mat::copyTo method:"]
+    #[doc = "@code"]
+    #[doc = "Mat A;"]
+    #[doc = "..."]
+    #[doc = "A.row(i) = A.row(j) + 0;"]
+    #[doc = "A.row(j).copyTo(A.row(i));"]
+    #[doc = "@endcode"]
+    #[doc = "@param y A 0-based row index."]
+    #[link_name = "\u{1}_ZNK2cv3Mat3rowEi"]
+    pub fn cv_Mat_row(this: *const cv_Mat, y: ::std::os::raw::c_int) -> cv_Mat;
+}
+extern "C" {
+    #[doc = " @brief Creates a matrix header for the specified matrix column."]
+    #[doc = ""]
+    #[doc = "The method makes a new header for the specified matrix column and returns it. This is an O(1)"]
+    #[doc = "operation, regardless of the matrix size. The underlying data of the new matrix is shared with the"]
+    #[doc = "original matrix. 
See also the Mat::row description."] + #[doc = "@param x A 0-based column index."] + #[link_name = "\u{1}_ZNK2cv3Mat3colEi"] + pub fn cv_Mat_col(this: *const cv_Mat, x: ::std::os::raw::c_int) -> cv_Mat; +} +extern "C" { + #[doc = " @brief Creates a matrix header for the specified row span."] + #[doc = ""] + #[doc = "The method makes a new header for the specified row span of the matrix. Similarly to Mat::row and"] + #[doc = "Mat::col , this is an O(1) operation."] + #[doc = "@param startrow An inclusive 0-based start index of the row span."] + #[doc = "@param endrow An exclusive 0-based ending index of the row span."] + #[link_name = "\u{1}_ZNK2cv3Mat8rowRangeEii"] + pub fn cv_Mat_rowRange( + this: *const cv_Mat, + startrow: ::std::os::raw::c_int, + endrow: ::std::os::raw::c_int, + ) -> cv_Mat; +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param r Range structure containing both the start and the end indices."] + #[link_name = "\u{1}_ZNK2cv3Mat8rowRangeERKNS_5RangeE"] + pub fn cv_Mat_rowRange1(this: *const cv_Mat, r: *const cv_Range) -> cv_Mat; +} +extern "C" { + #[doc = " @brief Creates a matrix header for the specified column span."] + #[doc = ""] + #[doc = "The method makes a new header for the specified column span of the matrix. Similarly to Mat::row and"] + #[doc = "Mat::col , this is an O(1) operation."] + #[doc = "@param startcol An inclusive 0-based start index of the column span."] + #[doc = "@param endcol An exclusive 0-based ending index of the column span."] + #[link_name = "\u{1}_ZNK2cv3Mat8colRangeEii"] + pub fn cv_Mat_colRange( + this: *const cv_Mat, + startcol: ::std::os::raw::c_int, + endcol: ::std::os::raw::c_int, + ) -> cv_Mat; +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param r Range structure containing both the start and the end indices."] + #[link_name = "\u{1}_ZNK2cv3Mat8colRangeERKNS_5RangeE"] + pub fn cv_Mat_colRange1(this: *const cv_Mat, r: *const cv_Range) -> cv_Mat; +} +extern "C" { + #[doc = " @brief Extracts a diagonal from a matrix"] + #[doc = ""] + #[doc = "The method makes a new header for the specified matrix diagonal. The new matrix is represented as a"] + #[doc = "single-column matrix. Similarly to Mat::row and Mat::col, this is an O(1) operation."] + #[doc = "@param d index of the diagonal, with the following values:"] + #[doc = "- `d=0` is the main diagonal."] + #[doc = "- `d<0` is a diagonal from the lower half. For example, d=-1 means the diagonal is set"] + #[doc = "immediately below the main one."] + #[doc = "- `d>0` is a diagonal from the upper half. 
For example, d=1 means the diagonal is set"] + #[doc = "immediately above the main one."] + #[doc = "For example:"] + #[doc = "@code"] + #[doc = "Mat m = (Mat_(3,3) <<"] + #[doc = "1,2,3,"] + #[doc = "4,5,6,"] + #[doc = "7,8,9);"] + #[doc = "Mat d0 = m.diag(0);"] + #[doc = "Mat d1 = m.diag(1);"] + #[doc = "Mat d_1 = m.diag(-1);"] + #[doc = "@endcode"] + #[doc = "The resulting matrices are"] + #[doc = "@code"] + #[doc = "d0 ="] + #[doc = "[1;"] + #[doc = "5;"] + #[doc = "9]"] + #[doc = "d1 ="] + #[doc = "[2;"] + #[doc = "6]"] + #[doc = "d_1 ="] + #[doc = "[4;"] + #[doc = "8]"] + #[doc = "@endcode"] + #[link_name = "\u{1}_ZNK2cv3Mat4diagEi"] + pub fn cv_Mat_diag(this: *const cv_Mat, d: ::std::os::raw::c_int) -> cv_Mat; +} +extern "C" { + #[doc = " @brief creates a diagonal matrix"] + #[doc = ""] + #[doc = "The method creates a square diagonal matrix from specified main diagonal."] + #[doc = "@param d One-dimensional matrix that represents the main diagonal."] + #[link_name = "\u{1}_ZN2cv3Mat4diagERKS0_"] + pub fn cv_Mat_diag1(d: *const cv_Mat) -> cv_Mat; +} +extern "C" { + #[doc = " @brief Creates a full copy of the array and the underlying data."] + #[doc = ""] + #[doc = "The method creates a full copy of the array. The original step[] is not taken into account. So, the"] + #[doc = "array copy is a continuous array occupying total()*elemSize() bytes."] + #[link_name = "\u{1}_ZNK2cv3Mat5cloneEv"] + pub fn cv_Mat_clone(this: *const cv_Mat) -> cv_Mat; +} +extern "C" { + #[doc = " @brief Copies the matrix to another one."] + #[doc = ""] + #[doc = "The method copies the matrix data to another matrix. Before copying the data, the method invokes :"] + #[doc = "@code"] + #[doc = "m.create(this->size(), this->type());"] + #[doc = "@endcode"] + #[doc = "so that the destination matrix is reallocated if needed. While m.copyTo(m); works flawlessly, the"] + #[doc = "function does not handle the case of a partial overlap between the source and the destination"] + #[doc = "matrices."] + #[doc = ""] + #[doc = "When the operation mask is specified, if the Mat::create call shown above reallocates the matrix,"] + #[doc = "the newly allocated matrix is initialized with all zeros before copying the data."] + #[doc = "@param m Destination matrix. If it does not have a proper size or type before the operation, it is"] + #[doc = "reallocated."] + #[link_name = "\u{1}_ZNK2cv3Mat6copyToERKNS_12_OutputArrayE"] + pub fn cv_Mat_copyTo(this: *const cv_Mat, m: cv_OutputArray); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param m Destination matrix. If it does not have a proper size or type before the operation, it is"] + #[doc = "reallocated."] + #[doc = "@param mask Operation mask of the same size as \\*this. Its non-zero elements indicate which matrix"] + #[doc = "elements need to be copied. The mask has to be of type CV_8U and can have 1 or multiple channels."] + #[link_name = "\u{1}_ZNK2cv3Mat6copyToERKNS_12_OutputArrayERKNS_11_InputArrayE"] + pub fn cv_Mat_copyTo1(this: *const cv_Mat, m: cv_OutputArray, mask: cv_InputArray); +} +extern "C" { + #[doc = " @brief Converts an array to another data type with optional scaling."] + #[doc = ""] + #[doc = "The method converts source pixel values to the target data type. 
saturate_cast\\<\\> is applied at"] + #[doc = "the end to avoid possible overflows:"] + #[doc = ""] + #[doc = "\\f[m(x,y) = saturate \\_ cast( \\alpha (*this)(x,y) + \\beta )\\f]"] + #[doc = "@param m output matrix; if it does not have a proper size or type before the operation, it is"] + #[doc = "reallocated."] + #[doc = "@param rtype desired output matrix type or, rather, the depth since the number of channels are the"] + #[doc = "same as the input has; if rtype is negative, the output matrix will have the same type as the input."] + #[doc = "@param alpha optional scale factor."] + #[doc = "@param beta optional delta added to the scaled values."] + #[link_name = "\u{1}_ZNK2cv3Mat9convertToERKNS_12_OutputArrayEidd"] + pub fn cv_Mat_convertTo( + this: *const cv_Mat, + m: cv_OutputArray, + rtype: ::std::os::raw::c_int, + alpha: f64, + beta: f64, + ); +} +extern "C" { + #[doc = " @brief Provides a functional form of convertTo."] + #[doc = ""] + #[doc = "This is an internally used method called by the @ref MatrixExpressions engine."] + #[doc = "@param m Destination array."] + #[doc = "@param type Desired destination array depth (or -1 if it should be the same as the source type)."] + #[link_name = "\u{1}_ZNK2cv3Mat8assignToERS0_i"] + pub fn cv_Mat_assignTo(this: *const cv_Mat, m: *mut cv_Mat, type_: ::std::os::raw::c_int); +} +extern "C" { + #[doc = " @brief Sets all or some of the array elements to the specified value."] + #[doc = ""] + #[doc = "This is an advanced variant of the Mat::operator=(const Scalar& s) operator."] + #[doc = "@param value Assigned scalar converted to the actual array type."] + #[doc = "@param mask Operation mask of the same size as \\*this. Its non-zero elements indicate which matrix"] + #[doc = "elements need to be copied. The mask has to be of type CV_8U and can have 1 or multiple channels"] + #[link_name = "\u{1}_ZN2cv3Mat5setToERKNS_11_InputArrayES3_"] + pub fn cv_Mat_setTo( + this: *mut cv_Mat, + value: cv_InputArray, + mask: cv_InputArray, + ) -> *mut cv_Mat; +} +extern "C" { + #[doc = " @brief Changes the shape and/or the number of channels of a 2D matrix without copying the data."] + #[doc = ""] + #[doc = "The method makes a new matrix header for \\*this elements. The new matrix may have a different size"] + #[doc = "and/or different number of channels. Any combination is possible if:"] + #[doc = "- No extra elements are included into the new matrix and no elements are excluded. Consequently,"] + #[doc = "the product rows\\*cols\\*channels() must stay the same after the transformation."] + #[doc = "- No data is copied. That is, this is an O(1) operation. Consequently, if you change the number of"] + #[doc = "rows, or the operation changes the indices of elements row in some other way, the matrix must be"] + #[doc = "continuous. See Mat::isContinuous ."] + #[doc = ""] + #[doc = "For example, if there is a set of 3D points stored as an STL vector, and you want to represent the"] + #[doc = "points as a 3xN matrix, do the following:"] + #[doc = "@code"] + #[doc = "std::vector vec;"] + #[doc = "..."] + #[doc = "Mat pointMat = Mat(vec). // convert vector to Mat, O(1) operation"] + #[doc = "reshape(1). // make Nx3 1-channel matrix out of Nx1 3-channel."] + #[doc = "t(); // finally, transpose the Nx3 matrix."] + #[doc = "@endcode"] + #[doc = "@param cn New number of channels. If the parameter is 0, the number of channels remains the same."] + #[doc = "@param rows New number of rows. 
If the parameter is 0, the number of rows remains the same."] + #[link_name = "\u{1}_ZNK2cv3Mat7reshapeEii"] + pub fn cv_Mat_reshape( + this: *const cv_Mat, + cn: ::std::os::raw::c_int, + rows: ::std::os::raw::c_int, + ) -> cv_Mat; +} +extern "C" { + #[doc = " @overload"] + #[link_name = "\u{1}_ZNK2cv3Mat7reshapeEiiPKi"] + pub fn cv_Mat_reshape1( + this: *const cv_Mat, + cn: ::std::os::raw::c_int, + newndims: ::std::os::raw::c_int, + newsz: *const ::std::os::raw::c_int, + ) -> cv_Mat; +} +extern "C" { + #[doc = " @overload"] + #[link_name = "\u{1}_ZNK2cv3Mat7reshapeEiRKSt6vectorIiSaIiEE"] + pub fn cv_Mat_reshape2( + this: *const cv_Mat, + cn: ::std::os::raw::c_int, + newshape: *const [u64; 3usize], + ) -> cv_Mat; +} +extern "C" { + #[doc = " @brief Transposes a matrix."] + #[doc = ""] + #[doc = "The method performs matrix transposition by means of matrix expressions. It does not perform the"] + #[doc = "actual transposition but returns a temporary matrix transposition object that can be further used as"] + #[doc = "a part of more complex matrix expressions or can be assigned to a matrix:"] + #[doc = "@code"] + #[doc = "Mat A1 = A + Mat::eye(A.size(), A.type())*lambda;"] + #[doc = "Mat C = A1.t()*A1; // compute (A + lambda*I)^t * (A + lamda*I)"] + #[doc = "@endcode"] + #[link_name = "\u{1}_ZNK2cv3Mat1tEv"] + pub fn cv_Mat_t(this: *const cv_Mat) -> cv_MatExpr; +} +extern "C" { + #[doc = " @brief Inverses a matrix."] + #[doc = ""] + #[doc = "The method performs a matrix inversion by means of matrix expressions. This means that a temporary"] + #[doc = "matrix inversion object is returned by the method and can be used further as a part of more complex"] + #[doc = "matrix expressions or can be assigned to a matrix."] + #[doc = "@param method Matrix inversion method. One of cv::DecompTypes"] + #[link_name = "\u{1}_ZNK2cv3Mat3invEi"] + pub fn cv_Mat_inv(this: *const cv_Mat, method: ::std::os::raw::c_int) -> cv_MatExpr; +} +extern "C" { + #[doc = " @brief Performs an element-wise multiplication or division of the two matrices."] + #[doc = ""] + #[doc = "The method returns a temporary object encoding per-element array multiplication, with optional"] + #[doc = "scale. Note that this is not a matrix multiplication that corresponds to a simpler \"\\*\" operator."] + #[doc = ""] + #[doc = "Example:"] + #[doc = "@code"] + #[doc = "Mat C = A.mul(5/B); // equivalent to divide(A, B, C, 5)"] + #[doc = "@endcode"] + #[doc = "@param m Another array of the same type and the same size as \\*this, or a matrix expression."] + #[doc = "@param scale Optional scale factor."] + #[link_name = "\u{1}_ZNK2cv3Mat3mulERKNS_11_InputArrayEd"] + pub fn cv_Mat_mul(this: *const cv_Mat, m: cv_InputArray, scale: f64) -> cv_MatExpr; +} +extern "C" { + #[doc = " @brief Computes a cross-product of two 3-element vectors."] + #[doc = ""] + #[doc = "The method computes a cross-product of two 3-element vectors. The vectors must be 3-element"] + #[doc = "floating-point vectors of the same shape and size. The result is another 3-element vector of the"] + #[doc = "same shape and type as operands."] + #[doc = "@param m Another cross-product operand."] + #[link_name = "\u{1}_ZNK2cv3Mat5crossERKNS_11_InputArrayE"] + pub fn cv_Mat_cross(this: *const cv_Mat, m: cv_InputArray) -> cv_Mat; +} +extern "C" { + #[doc = " @brief Computes a dot-product of two vectors."] + #[doc = ""] + #[doc = "The method computes a dot-product of two matrices. 
If the matrices are not single-column or"] + #[doc = "single-row vectors, the top-to-bottom left-to-right scan ordering is used to treat them as 1D"] + #[doc = "vectors. The vectors must have the same size and type. If the matrices have more than one channel,"] + #[doc = "the dot products from all the channels are summed together."] + #[doc = "@param m another dot-product operand."] + #[link_name = "\u{1}_ZNK2cv3Mat3dotERKNS_11_InputArrayE"] + pub fn cv_Mat_dot(this: *const cv_Mat, m: cv_InputArray) -> f64; +} +extern "C" { + #[doc = " @brief Returns a zero array of the specified size and type."] + #[doc = ""] + #[doc = "The method returns a Matlab-style zero array initializer. It can be used to quickly form a constant"] + #[doc = "array as a function parameter, part of a matrix expression, or as a matrix initializer:"] + #[doc = "@code"] + #[doc = "Mat A;"] + #[doc = "A = Mat::zeros(3, 3, CV_32F);"] + #[doc = "@endcode"] + #[doc = "In the example above, a new matrix is allocated only if A is not a 3x3 floating-point matrix."] + #[doc = "Otherwise, the existing matrix A is filled with zeros."] + #[doc = "@param rows Number of rows."] + #[doc = "@param cols Number of columns."] + #[doc = "@param type Created matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat5zerosEiii"] + pub fn cv_Mat_zeros( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr; +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param size Alternative to the matrix size specification Size(cols, rows) ."] + #[doc = "@param type Created matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat5zerosENS_5Size_IiEEi"] + pub fn cv_Mat_zeros1(size: cv_Size, type_: ::std::os::raw::c_int) -> cv_MatExpr; +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param ndims Array dimensionality."] + #[doc = "@param sz Array of integers specifying the array shape."] + #[doc = "@param type Created matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat5zerosEiPKii"] + pub fn cv_Mat_zeros2( + ndims: ::std::os::raw::c_int, + sz: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr; +} +extern "C" { + #[doc = " @brief Returns an array of all 1's of the specified size and type."] + #[doc = ""] + #[doc = "The method returns a Matlab-style 1's array initializer, similarly to Mat::zeros. Note that using"] + #[doc = "this method you can initialize an array with an arbitrary value, using the following Matlab idiom:"] + #[doc = "@code"] + #[doc = "Mat A = Mat::ones(100, 100, CV_8U)*3; // make 100x100 matrix filled with 3."] + #[doc = "@endcode"] + #[doc = "The above operation does not form a 100x100 matrix of 1's and then multiply it by 3. 
Instead, it"] + #[doc = "just remembers the scale factor (3 in this case) and use it when actually invoking the matrix"] + #[doc = "initializer."] + #[doc = "@note In case of multi-channels type, only the first channel will be initialized with 1's, the"] + #[doc = "others will be set to 0's."] + #[doc = "@param rows Number of rows."] + #[doc = "@param cols Number of columns."] + #[doc = "@param type Created matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat4onesEiii"] + pub fn cv_Mat_ones( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr; +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param size Alternative to the matrix size specification Size(cols, rows) ."] + #[doc = "@param type Created matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat4onesENS_5Size_IiEEi"] + pub fn cv_Mat_ones1(size: cv_Size, type_: ::std::os::raw::c_int) -> cv_MatExpr; +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param ndims Array dimensionality."] + #[doc = "@param sz Array of integers specifying the array shape."] + #[doc = "@param type Created matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat4onesEiPKii"] + pub fn cv_Mat_ones2( + ndims: ::std::os::raw::c_int, + sz: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr; +} +extern "C" { + #[doc = " @brief Returns an identity matrix of the specified size and type."] + #[doc = ""] + #[doc = "The method returns a Matlab-style identity matrix initializer, similarly to Mat::zeros. Similarly to"] + #[doc = "Mat::ones, you can use a scale operation to create a scaled identity matrix efficiently:"] + #[doc = "@code"] + #[doc = "Mat A = Mat::eye(4, 4, CV_32F)*0.1;"] + #[doc = "@endcode"] + #[doc = "@note In case of multi-channels type, identity matrix will be initialized only for the first channel,"] + #[doc = "the others will be set to 0's"] + #[doc = "@param rows Number of rows."] + #[doc = "@param cols Number of columns."] + #[doc = "@param type Created matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat3eyeEiii"] + pub fn cv_Mat_eye( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr; +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param size Alternative matrix size specification as Size(cols, rows) ."] + #[doc = "@param type Created matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat3eyeENS_5Size_IiEEi"] + pub fn cv_Mat_eye1(size: cv_Size, type_: ::std::os::raw::c_int) -> cv_MatExpr; +} +extern "C" { + #[doc = " @brief Allocates new array data if needed."] + #[doc = ""] + #[doc = "This is one of the key Mat methods. Most new-style OpenCV functions and methods that produce arrays"] + #[doc = "call this method for each output array. The method uses the following algorithm:"] + #[doc = ""] + #[doc = "-# If the current array shape and the type match the new ones, return immediately. Otherwise,"] + #[doc = "de-reference the previous data by calling Mat::release."] + #[doc = "-# Initialize the new header."] + #[doc = "-# Allocate the new data of total()\\*elemSize() bytes."] + #[doc = "-# Allocate the new, associated with the data, reference counter and set it to 1."] + #[doc = ""] + #[doc = "Such a scheme makes the memory management robust and efficient at the same time and helps avoid"] + #[doc = "extra typing for you. 
This means that usually there is no need to explicitly allocate output arrays."] + #[doc = "That is, instead of writing:"] + #[doc = "@code"] + #[doc = "Mat color;"] + #[doc = "..."] + #[doc = "Mat gray(color.rows, color.cols, color.depth());"] + #[doc = "cvtColor(color, gray, COLOR_BGR2GRAY);"] + #[doc = "@endcode"] + #[doc = "you can simply write:"] + #[doc = "@code"] + #[doc = "Mat color;"] + #[doc = "..."] + #[doc = "Mat gray;"] + #[doc = "cvtColor(color, gray, COLOR_BGR2GRAY);"] + #[doc = "@endcode"] + #[doc = "because cvtColor, as well as the most of OpenCV functions, calls Mat::create() for the output array"] + #[doc = "internally."] + #[doc = "@param rows New number of rows."] + #[doc = "@param cols New number of columns."] + #[doc = "@param type New matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat6createEiii"] + pub fn cv_Mat_create( + this: *mut cv_Mat, + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param size Alternative new matrix size specification: Size(cols, rows)"] + #[doc = "@param type New matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat6createENS_5Size_IiEEi"] + pub fn cv_Mat_create1(this: *mut cv_Mat, size: cv_Size, type_: ::std::os::raw::c_int); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param ndims New array dimensionality."] + #[doc = "@param sizes Array of integers specifying a new array shape."] + #[doc = "@param type New matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat6createEiPKii"] + pub fn cv_Mat_create2( + this: *mut cv_Mat, + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param sizes Array of integers specifying a new array shape."] + #[doc = "@param type New matrix type."] + #[link_name = "\u{1}_ZN2cv3Mat6createERKSt6vectorIiSaIiEEi"] + pub fn cv_Mat_create3( + this: *mut cv_Mat, + sizes: *const [u64; 3usize], + type_: ::std::os::raw::c_int, + ); +} +extern "C" { + #[doc = " @brief Increments the reference counter."] + #[doc = ""] + #[doc = "The method increments the reference counter associated with the matrix data. If the matrix header"] + #[doc = "points to an external data set (see Mat::Mat ), the reference counter is NULL, and the method has no"] + #[doc = "effect in this case. Normally, to avoid memory leaks, the method should not be called explicitly. It"] + #[doc = "is called implicitly by the matrix assignment operator. The reference counter increment is an atomic"] + #[doc = "operation on the platforms that support it. Thus, it is safe to operate on the same matrices"] + #[doc = "asynchronously in different threads."] + #[link_name = "\u{1}_ZN2cv3Mat6addrefEv"] + pub fn cv_Mat_addref(this: *mut cv_Mat); +} +extern "C" { + #[doc = " @brief Decrements the reference counter and deallocates the matrix if needed."] + #[doc = ""] + #[doc = "The method decrements the reference counter associated with the matrix data. When the reference"] + #[doc = "counter reaches 0, the matrix data is deallocated and the data and the reference counter pointers"] + #[doc = "are set to NULL's. If the matrix header points to an external data set (see Mat::Mat ), the"] + #[doc = "reference counter is NULL, and the method has no effect in this case."] + #[doc = ""] + #[doc = "This method can be called manually to force the matrix data deallocation. 
But since this method is"] + #[doc = "automatically called in the destructor, or by any other method that changes the data pointer, it is"] + #[doc = "usually not needed. The reference counter decrement and check for 0 is an atomic operation on the"] + #[doc = "platforms that support it. Thus, it is safe to operate on the same matrices asynchronously in"] + #[doc = "different threads."] + #[link_name = "\u{1}_ZN2cv3Mat7releaseEv"] + pub fn cv_Mat_release(this: *mut cv_Mat); +} +extern "C" { + #[doc = "! internal use function, consider to use 'release' method instead; deallocates the matrix data"] + #[link_name = "\u{1}_ZN2cv3Mat10deallocateEv"] + pub fn cv_Mat_deallocate(this: *mut cv_Mat); +} +extern "C" { + #[doc = "! internal use function; properly re-allocates _size, _step arrays"] + #[link_name = "\u{1}_ZN2cv3Mat8copySizeERKS0_"] + pub fn cv_Mat_copySize(this: *mut cv_Mat, m: *const cv_Mat); +} +extern "C" { + #[doc = " @brief Reserves space for the certain number of rows."] + #[doc = ""] + #[doc = "The method reserves space for sz rows. If the matrix already has enough space to store sz rows,"] + #[doc = "nothing happens. If the matrix is reallocated, the first Mat::rows rows are preserved. The method"] + #[doc = "emulates the corresponding method of the STL vector class."] + #[doc = "@param sz Number of rows."] + #[link_name = "\u{1}_ZN2cv3Mat7reserveEm"] + pub fn cv_Mat_reserve(this: *mut cv_Mat, sz: size_t); +} +extern "C" { + #[doc = " @brief Reserves space for the certain number of bytes."] + #[doc = ""] + #[doc = "The method reserves space for sz bytes. If the matrix already has enough space to store sz bytes,"] + #[doc = "nothing happens. If matrix has to be reallocated its previous content could be lost."] + #[doc = "@param sz Number of bytes."] + #[link_name = "\u{1}_ZN2cv3Mat13reserveBufferEm"] + pub fn cv_Mat_reserveBuffer(this: *mut cv_Mat, sz: size_t); +} +extern "C" { + #[doc = " @brief Changes the number of matrix rows."] + #[doc = ""] + #[doc = "The methods change the number of matrix rows. If the matrix is reallocated, the first"] + #[doc = "min(Mat::rows, sz) rows are preserved. The methods emulate the corresponding methods of the STL"] + #[doc = "vector class."] + #[doc = "@param sz New number of rows."] + #[link_name = "\u{1}_ZN2cv3Mat6resizeEm"] + pub fn cv_Mat_resize(this: *mut cv_Mat, sz: size_t); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param sz New number of rows."] + #[doc = "@param s Value assigned to the newly added elements."] + #[link_name = "\u{1}_ZN2cv3Mat6resizeEmRKNS_7Scalar_IdEE"] + pub fn cv_Mat_resize1(this: *mut cv_Mat, sz: size_t, s: *const cv_Scalar); +} +extern "C" { + #[doc = "! internal function"] + #[link_name = "\u{1}_ZN2cv3Mat10push_back_EPKv"] + pub fn cv_Mat_push_back_(this: *mut cv_Mat, elem: *const ::std::os::raw::c_void); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param m Added line(s)."] + #[link_name = "\u{1}_ZN2cv3Mat9push_backERKS0_"] + pub fn cv_Mat_push_back(this: *mut cv_Mat, m: *const cv_Mat); +} +extern "C" { + #[doc = " @brief Removes elements from the bottom of the matrix."] + #[doc = ""] + #[doc = "The method removes one or more rows from the bottom of the matrix."] + #[doc = "@param nelems Number of removed rows. 
If it is greater than the total number of rows, an exception"] + #[doc = "is thrown."] + #[link_name = "\u{1}_ZN2cv3Mat8pop_backEm"] + pub fn cv_Mat_pop_back(this: *mut cv_Mat, nelems: size_t); +} +extern "C" { + #[doc = " @brief Locates the matrix header within a parent matrix."] + #[doc = ""] + #[doc = "After you extracted a submatrix from a matrix using Mat::row, Mat::col, Mat::rowRange,"] + #[doc = "Mat::colRange, and others, the resultant submatrix points just to the part of the original big"] + #[doc = "matrix. However, each submatrix contains information (represented by datastart and dataend"] + #[doc = "fields) that helps reconstruct the original matrix size and the position of the extracted"] + #[doc = "submatrix within the original matrix. The method locateROI does exactly that."] + #[doc = "@param wholeSize Output parameter that contains the size of the whole matrix containing *this*"] + #[doc = "as a part."] + #[doc = "@param ofs Output parameter that contains an offset of *this* inside the whole matrix."] + #[link_name = "\u{1}_ZNK2cv3Mat9locateROIERNS_5Size_IiEERNS_6Point_IiEE"] + pub fn cv_Mat_locateROI(this: *const cv_Mat, wholeSize: *mut cv_Size, ofs: *mut cv_Point); +} +extern "C" { + #[doc = " @brief Adjusts a submatrix size and position within the parent matrix."] + #[doc = ""] + #[doc = "The method is complimentary to Mat::locateROI . The typical use of these functions is to determine"] + #[doc = "the submatrix position within the parent matrix and then shift the position somehow. Typically, it"] + #[doc = "can be required for filtering operations when pixels outside of the ROI should be taken into"] + #[doc = "account. When all the method parameters are positive, the ROI needs to grow in all directions by the"] + #[doc = "specified amount, for example:"] + #[doc = "@code"] + #[doc = "A.adjustROI(2, 2, 2, 2);"] + #[doc = "@endcode"] + #[doc = "In this example, the matrix size is increased by 4 elements in each direction. The matrix is shifted"] + #[doc = "by 2 elements to the left and 2 elements up, which brings in all the necessary pixels for the"] + #[doc = "filtering with the 5x5 kernel."] + #[doc = ""] + #[doc = "adjustROI forces the adjusted ROI to be inside of the parent matrix that is boundaries of the"] + #[doc = "adjusted ROI are constrained by boundaries of the parent matrix. For example, if the submatrix A is"] + #[doc = "located in the first row of a parent matrix and you called A.adjustROI(2, 2, 2, 2) then A will not"] + #[doc = "be increased in the upward direction."] + #[doc = ""] + #[doc = "The function is used internally by the OpenCV filtering functions, like filter2D , morphological"] + #[doc = "operations, and so on."] + #[doc = "@param dtop Shift of the top submatrix boundary upwards."] + #[doc = "@param dbottom Shift of the bottom submatrix boundary downwards."] + #[doc = "@param dleft Shift of the left submatrix boundary to the left."] + #[doc = "@param dright Shift of the right submatrix boundary to the right."] + #[doc = "@sa copyMakeBorder"] + #[link_name = "\u{1}_ZN2cv3Mat9adjustROIEiiii"] + pub fn cv_Mat_adjustROI( + this: *mut cv_Mat, + dtop: ::std::os::raw::c_int, + dbottom: ::std::os::raw::c_int, + dleft: ::std::os::raw::c_int, + dright: ::std::os::raw::c_int, + ) -> *mut cv_Mat; +} +extern "C" { + #[doc = " @brief Reports whether the matrix is continuous or not."] + #[doc = ""] + #[doc = "The method returns true if the matrix elements are stored continuously without gaps at the end of"] + #[doc = "each row. Otherwise, it returns false. 
+extern "C" {
+    #[doc = " @brief Reports whether the matrix is continuous or not."]
+    #[doc = ""]
+    #[doc = "The method returns true if the matrix elements are stored continuously without gaps at the end of"]
+    #[doc = "each row. Otherwise, it returns false. Obviously, 1x1 or 1xN matrices are always continuous."]
+    #[doc = "Matrices created with Mat::create are always continuous. But if you extract a part of the matrix"]
+    #[doc = "using Mat::col, Mat::diag, and so on, or construct a matrix header for externally allocated data,"]
+    #[doc = "such matrices may no longer have this property."]
+    #[doc = ""]
+    #[doc = "The continuity flag is stored as a bit in the Mat::flags field and is computed automatically when"]
+    #[doc = "you construct a matrix header. Thus, the continuity check is a very fast operation, though"]
+    #[doc = "theoretically it could be done as follows:"]
+    #[doc = "@code"]
+    #[doc = "bool myCheckMatContinuity(const Mat& m)"]
+    #[doc = "{"]
+    #[doc = "return m.rows == 1 || m.step == m.cols*m.elemSize();"]
+    #[doc = "}"]
+    #[doc = "@endcode"]
+    #[doc = "The method is used in quite a few OpenCV functions. The point is that element-wise operations"]
+    #[doc = "(such as arithmetic and logical operations, math functions, alpha blending, color space"]
+    #[doc = "transformations, and others) do not depend on the image geometry. Thus, if all the input and output"]
+    #[doc = "arrays are continuous, the functions can process them as very long single-row vectors. The example"]
+    #[doc = "below illustrates how an alpha-blending function can be implemented:"]
+    #[doc = "@code"]
+    #[doc = "template<typename T>"]
+    #[doc = "void alphaBlendRGBA(const Mat& src1, const Mat& src2, Mat& dst)"]
+    #[doc = "{"]
+    #[doc = "const float alpha_scale = (float)std::numeric_limits<T>::max(),"]
+    #[doc = "inv_scale = 1.f/alpha_scale;"]
+    #[doc = ""]
+    #[doc = "CV_Assert( src1.type() == src2.type() &&"]
+    #[doc = "src1.type() == CV_MAKETYPE(traits::Depth<T>::value, 4) &&"]
+    #[doc = "src1.size() == src2.size());"]
+    #[doc = "Size size = src1.size();"]
+    #[doc = "dst.create(size, src1.type());"]
+    #[doc = ""]
+    #[doc = "if( src1.isContinuous() && src2.isContinuous() && dst.isContinuous() )"]
+    #[doc = "{"]
+    #[doc = "size.width *= size.height;"]
+    #[doc = "size.height = 1;"]
+    #[doc = "}"]
+    #[doc = "size.width *= 4;"]
+    #[doc = ""]
+    #[doc = "for( int i = 0; i < size.height; i++ )"]
+    #[doc = "{"]
+    #[doc = "const T* ptr1 = src1.ptr<T>(i);"]
+    #[doc = "const T* ptr2 = src2.ptr<T>(i);"]
+    #[doc = "T* dptr = dst.ptr<T>(i);"]
+    #[doc = ""]
+    #[doc = "for( int j = 0; j < size.width; j += 4 )"]
+    #[doc = "{"]
+    #[doc = "float alpha = ptr1[j+3]*inv_scale, beta = ptr2[j+3]*inv_scale;"]
+    #[doc = "dptr[j] = saturate_cast<T>(ptr1[j]*alpha + ptr2[j]*beta);"]
+    #[doc = "dptr[j+1] = saturate_cast<T>(ptr1[j+1]*alpha + ptr2[j+1]*beta);"]
+    #[doc = "dptr[j+2] = saturate_cast<T>(ptr1[j+2]*alpha + ptr2[j+2]*beta);"]
+    #[doc = "dptr[j+3] = saturate_cast<T>((1 - (1-alpha)*(1-beta))*alpha_scale);"]
+    #[doc = "}"]
+    #[doc = "}"]
+    #[doc = "}"]
+    #[doc = "@endcode"]
+    #[doc = "This approach, while being very simple, can boost the performance of a simple element-operation by"]
+    #[doc = "10-20 percent, especially if the image is rather small and the operation is quite simple."]
+    #[doc = ""]
+    #[doc = "Another OpenCV idiom in this function is the call of Mat::create for the destination array, which"]
+    #[doc = "allocates the destination array unless it already has the proper size and type. And while the newly"]
+    #[doc = "allocated arrays are always continuous, you still need to check the destination array because"]
+    #[doc = "Mat::create does not always allocate a new matrix."]
+    #[link_name = "\u{1}_ZNK2cv3Mat12isContinuousEv"]
+    pub fn cv_Mat_isContinuous(this: *const cv_Mat) -> bool;
+}
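+// Editor's note: a usage sketch, not bindgen output, of the continuity
+// property documented above: a continuous matrix can be scanned as a single
+// flat run of total() * elemSize() bytes, while a submatrix generally has a
+// gap after each row and must be walked row by row.
+#[allow(dead_code)]
+unsafe fn example_flat_byte_len(m: *const cv_Mat) -> Option<size_t> {
+    if cv_Mat_isContinuous(m) {
+        Some(cv_Mat_total(m) * cv_Mat_elemSize(m))
+    } else {
+        None // caller should iterate rows via cv_Mat_ptr1 instead
+    }
+}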
+extern "C" {
+    #[doc = "! returns true if the matrix is a submatrix of another matrix"]
+    #[link_name = "\u{1}_ZNK2cv3Mat11isSubmatrixEv"]
+    pub fn cv_Mat_isSubmatrix(this: *const cv_Mat) -> bool;
+}
+extern "C" {
+    #[doc = " @brief Returns the matrix element size in bytes."]
+    #[doc = ""]
+    #[doc = "The method returns the matrix element size in bytes. For example, if the matrix type is CV_16SC3 ,"]
+    #[doc = "the method returns 3\\*sizeof(short) or 6."]
+    #[link_name = "\u{1}_ZNK2cv3Mat8elemSizeEv"]
+    pub fn cv_Mat_elemSize(this: *const cv_Mat) -> size_t;
+}
+extern "C" {
+    #[doc = " @brief Returns the size of each matrix element channel in bytes."]
+    #[doc = ""]
+    #[doc = "The method returns the matrix element channel size in bytes, that is, it ignores the number of"]
+    #[doc = "channels. For example, if the matrix type is CV_16SC3 , the method returns sizeof(short) or 2."]
+    #[link_name = "\u{1}_ZNK2cv3Mat9elemSize1Ev"]
+    pub fn cv_Mat_elemSize1(this: *const cv_Mat) -> size_t;
+}
+extern "C" {
+    #[doc = " @brief Returns the type of a matrix element."]
+    #[doc = ""]
+    #[doc = "The method returns a matrix element type. This is an identifier compatible with the CvMat type"]
+    #[doc = "system, like CV_16SC3 or 16-bit signed 3-channel array, and so on."]
+    #[link_name = "\u{1}_ZNK2cv3Mat4typeEv"]
+    pub fn cv_Mat_type(this: *const cv_Mat) -> ::std::os::raw::c_int;
+}
+extern "C" {
+    #[doc = " @brief Returns the depth of a matrix element."]
+    #[doc = ""]
+    #[doc = "The method returns the identifier of the matrix element depth (the type of each individual channel)."]
+    #[doc = "For example, for a 16-bit signed element array, the method returns CV_16S . A complete list of"]
+    #[doc = "matrix types contains the following values:"]
+    #[doc = "- CV_8U - 8-bit unsigned integers ( 0..255 )"]
+    #[doc = "- CV_8S - 8-bit signed integers ( -128..127 )"]
+    #[doc = "- CV_16U - 16-bit unsigned integers ( 0..65535 )"]
+    #[doc = "- CV_16S - 16-bit signed integers ( -32768..32767 )"]
+    #[doc = "- CV_32S - 32-bit signed integers ( -2147483648..2147483647 )"]
+    #[doc = "- CV_32F - 32-bit floating-point numbers ( -FLT_MAX..FLT_MAX, INF, NAN )"]
+    #[doc = "- CV_64F - 64-bit floating-point numbers ( -DBL_MAX..DBL_MAX, INF, NAN )"]
+    #[link_name = "\u{1}_ZNK2cv3Mat5depthEv"]
+    pub fn cv_Mat_depth(this: *const cv_Mat) -> ::std::os::raw::c_int;
+}
+extern "C" {
+    #[doc = " @brief Returns the number of matrix channels."]
+    #[doc = ""]
+    #[doc = "The method returns the number of matrix channels."]
+    #[link_name = "\u{1}_ZNK2cv3Mat8channelsEv"]
+    pub fn cv_Mat_channels(this: *const cv_Mat) -> ::std::os::raw::c_int;
+}
+extern "C" {
+    #[doc = " @brief Returns a normalized step."]
+    #[doc = ""]
+    #[doc = "The method returns a matrix step divided by Mat::elemSize1() . It can be useful to quickly access an"]
+    #[doc = "arbitrary matrix element."]
+    #[link_name = "\u{1}_ZNK2cv3Mat5step1Ei"]
+    pub fn cv_Mat_step1(this: *const cv_Mat, i: ::std::os::raw::c_int) -> size_t;
+}
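+// Editor's note: a small sketch, not bindgen output, relating the accessors
+// above. For a CV_16SC3 matrix, depth() is CV_16S, channels() is 3,
+// elemSize1() is 2 and elemSize() is 6, so elemSize() == channels() * elemSize1().
+#[allow(dead_code)]
+unsafe fn example_elem_sizes(m: *const cv_Mat) {
+    let per_channel = cv_Mat_elemSize1(m);
+    let per_element = cv_Mat_elemSize(m);
+    let channels = cv_Mat_channels(m) as size_t;
+    debug_assert_eq!(per_element, per_channel * channels);
+}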
+extern "C" {
+    #[doc = " @brief Returns true if the array has no elements."]
+    #[doc = ""]
+    #[doc = "The method returns true if Mat::total() is 0 or if Mat::data is NULL. Because of the pop_back() and"]
+    #[doc = "resize() methods, `M.total() == 0` does not imply that `M.data == NULL`."]
+    #[link_name = "\u{1}_ZNK2cv3Mat5emptyEv"]
+    pub fn cv_Mat_empty(this: *const cv_Mat) -> bool;
+}
+extern "C" {
+    #[doc = " @brief Returns the total number of array elements."]
+    #[doc = ""]
+    #[doc = "The method returns the number of array elements (a number of pixels if the array represents an"]
+    #[doc = "image)."]
+    #[link_name = "\u{1}_ZNK2cv3Mat5totalEv"]
+    pub fn cv_Mat_total(this: *const cv_Mat) -> size_t;
+}
+extern "C" {
+    #[doc = " @brief Returns the total number of array elements."]
+    #[doc = ""]
+    #[doc = "The method returns the number of elements within a certain sub-array slice with startDim <= dim < endDim."]
+    #[link_name = "\u{1}_ZNK2cv3Mat5totalEii"]
+    pub fn cv_Mat_total1(
+        this: *const cv_Mat,
+        startDim: ::std::os::raw::c_int,
+        endDim: ::std::os::raw::c_int,
+    ) -> size_t;
+}
+extern "C" {
+    #[doc = " @param elemChannels Number of channels or number of columns the matrix should have."]
+    #[doc = " For a 2-D matrix, when the matrix has only 1 column, then it should have"]
+    #[doc = " elemChannels channels; when the matrix has only 1 channel,"]
+    #[doc = " then it should have elemChannels columns."]
+    #[doc = " For a 3-D matrix, it should have only one channel. Furthermore,"]
+    #[doc = " if the number of planes is not one, then the number of rows"]
+    #[doc = " within every plane has to be 1; if the number of rows within"]
+    #[doc = " every plane is not 1, then the number of planes has to be 1."]
+    #[doc = " @param depth The depth the matrix should have. Set it to -1 when any depth is fine."]
+    #[doc = " @param requireContinuous Set it to true to require the matrix to be continuous."]
+    #[doc = " @return -1 if the requirement is not satisfied."]
+    #[doc = " Otherwise, it returns the number of elements in the matrix. Note"]
+    #[doc = " that an element may have multiple channels."]
+    #[doc = ""]
+    #[doc = " The following code demonstrates its usage for a 2-d matrix:"]
+    #[doc = " @snippet snippets/core_mat_checkVector.cpp example-2d"]
+    #[doc = ""]
+    #[doc = " The following code demonstrates its usage for a 3-d matrix:"]
+    #[doc = " @snippet snippets/core_mat_checkVector.cpp example-3d"]
+    #[link_name = "\u{1}_ZNK2cv3Mat11checkVectorEiib"]
+    pub fn cv_Mat_checkVector(
+        this: *const cv_Mat,
+        elemChannels: ::std::os::raw::c_int,
+        depth: ::std::os::raw::c_int,
+        requireContinuous: bool,
+    ) -> ::std::os::raw::c_int;
+}
+extern "C" {
+    #[doc = " @brief Returns a pointer to the specified matrix row."]
+    #[doc = ""]
+    #[doc = "The methods return `uchar*` or typed pointer to the specified matrix row. 
See the sample in"] + #[doc = "Mat::isContinuous to know how to use these methods."] + #[doc = "@param i0 A 0-based row index."] + #[link_name = "\u{1}_ZN2cv3Mat3ptrEi"] + pub fn cv_Mat_ptr(this: *mut cv_Mat, i0: ::std::os::raw::c_int) -> *mut uchar; +} +extern "C" { + #[doc = " @overload"] + #[link_name = "\u{1}_ZNK2cv3Mat3ptrEi"] + pub fn cv_Mat_ptr1(this: *const cv_Mat, i0: ::std::os::raw::c_int) -> *const uchar; +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param row Index along the dimension 0"] + #[doc = "@param col Index along the dimension 1"] + #[link_name = "\u{1}_ZN2cv3Mat3ptrEii"] + pub fn cv_Mat_ptr2( + this: *mut cv_Mat, + row: ::std::os::raw::c_int, + col: ::std::os::raw::c_int, + ) -> *mut uchar; +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param row Index along the dimension 0"] + #[doc = "@param col Index along the dimension 1"] + #[link_name = "\u{1}_ZNK2cv3Mat3ptrEii"] + pub fn cv_Mat_ptr3( + this: *const cv_Mat, + row: ::std::os::raw::c_int, + col: ::std::os::raw::c_int, + ) -> *const uchar; +} +extern "C" { + #[doc = " @overload"] + #[link_name = "\u{1}_ZN2cv3Mat3ptrEiii"] + pub fn cv_Mat_ptr4( + this: *mut cv_Mat, + i0: ::std::os::raw::c_int, + i1: ::std::os::raw::c_int, + i2: ::std::os::raw::c_int, + ) -> *mut uchar; +} +extern "C" { + #[doc = " @overload"] + #[link_name = "\u{1}_ZNK2cv3Mat3ptrEiii"] + pub fn cv_Mat_ptr5( + this: *const cv_Mat, + i0: ::std::os::raw::c_int, + i1: ::std::os::raw::c_int, + i2: ::std::os::raw::c_int, + ) -> *const uchar; +} +extern "C" { + #[doc = " @overload"] + #[link_name = "\u{1}_ZN2cv3Mat3ptrEPKi"] + pub fn cv_Mat_ptr6(this: *mut cv_Mat, idx: *const ::std::os::raw::c_int) -> *mut uchar; +} +extern "C" { + #[doc = " @overload"] + #[link_name = "\u{1}_ZNK2cv3Mat3ptrEPKi"] + pub fn cv_Mat_ptr7(this: *const cv_Mat, idx: *const ::std::os::raw::c_int) -> *const uchar; +} +extern "C" { + #[doc = "! and the standard allocator"] + #[link_name = "\u{1}_ZN2cv3Mat15getStdAllocatorEv"] + pub fn cv_Mat_getStdAllocator() -> *mut cv_MatAllocator; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv3Mat19getDefaultAllocatorEv"] + pub fn cv_Mat_getDefaultAllocator() -> *mut cv_MatAllocator; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv3Mat19setDefaultAllocatorEPNS_12MatAllocatorE"] + pub fn cv_Mat_setDefaultAllocator(allocator: *mut cv_MatAllocator); +} +extern "C" { + #[doc = "! internal use method: updates the continuity flag"] + #[link_name = "\u{1}_ZN2cv3Mat20updateContinuityFlagEv"] + pub fn cv_Mat_updateContinuityFlag(this: *mut cv_Mat); +} +extern "C" { + #[doc = "These are various constructors that form a matrix. As noted in the AutomaticAllocation, often"] + #[doc = "the default constructor is enough, and the proper matrix will be allocated by an OpenCV function."] + #[doc = "The constructed matrix can further be assigned to another matrix or matrix expression or can be"] + #[doc = "allocated with Mat::create . In the former case, the old content is de-referenced."] + #[link_name = "\u{1}_ZN2cv3MatC1Ev"] + pub fn cv_Mat_Mat(this: *mut cv_Mat); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param rows Number of rows in a 2D array."] + #[doc = "@param cols Number of columns in a 2D array."] + #[doc = "@param type Array type. 
Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[link_name = "\u{1}_ZN2cv3MatC1Eiii"] + pub fn cv_Mat_Mat1( + this: *mut cv_Mat, + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param size 2D array size: Size(cols, rows) . In the Size() constructor, the number of rows and the"] + #[doc = "number of columns go in the reverse order."] + #[doc = "@param type Array type. Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[link_name = "\u{1}_ZN2cv3MatC1ENS_5Size_IiEEi"] + pub fn cv_Mat_Mat2(this: *mut cv_Mat, size: cv_Size, type_: ::std::os::raw::c_int); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param rows Number of rows in a 2D array."] + #[doc = "@param cols Number of columns in a 2D array."] + #[doc = "@param type Array type. Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[doc = "@param s An optional value to initialize each matrix element with. To set all the matrix elements to"] + #[doc = "the particular value after the construction, use the assignment operator"] + #[doc = "Mat::operator=(const Scalar& value) ."] + #[link_name = "\u{1}_ZN2cv3MatC1EiiiRKNS_7Scalar_IdEE"] + pub fn cv_Mat_Mat3( + this: *mut cv_Mat, + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param size 2D array size: Size(cols, rows) . In the Size() constructor, the number of rows and the"] + #[doc = "number of columns go in the reverse order."] + #[doc = "@param type Array type. Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[doc = "@param s An optional value to initialize each matrix element with. To set all the matrix elements to"] + #[doc = "the particular value after the construction, use the assignment operator"] + #[doc = "Mat::operator=(const Scalar& value) ."] + #[link_name = "\u{1}_ZN2cv3MatC1ENS_5Size_IiEEiRKNS_7Scalar_IdEE"] + pub fn cv_Mat_Mat4( + this: *mut cv_Mat, + size: cv_Size, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param ndims Array dimensionality."] + #[doc = "@param sizes Array of integers specifying an n-dimensional array shape."] + #[doc = "@param type Array type. Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[link_name = "\u{1}_ZN2cv3MatC1EiPKii"] + pub fn cv_Mat_Mat5( + this: *mut cv_Mat, + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param sizes Array of integers specifying an n-dimensional array shape."] + #[doc = "@param type Array type. 
Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[link_name = "\u{1}_ZN2cv3MatC1ERKSt6vectorIiSaIiEEi"] + pub fn cv_Mat_Mat6( + this: *mut cv_Mat, + sizes: *const [u64; 3usize], + type_: ::std::os::raw::c_int, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param ndims Array dimensionality."] + #[doc = "@param sizes Array of integers specifying an n-dimensional array shape."] + #[doc = "@param type Array type. Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[doc = "@param s An optional value to initialize each matrix element with. To set all the matrix elements to"] + #[doc = "the particular value after the construction, use the assignment operator"] + #[doc = "Mat::operator=(const Scalar& value) ."] + #[link_name = "\u{1}_ZN2cv3MatC1EiPKiiRKNS_7Scalar_IdEE"] + pub fn cv_Mat_Mat7( + this: *mut cv_Mat, + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param sizes Array of integers specifying an n-dimensional array shape."] + #[doc = "@param type Array type. Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[doc = "@param s An optional value to initialize each matrix element with. To set all the matrix elements to"] + #[doc = "the particular value after the construction, use the assignment operator"] + #[doc = "Mat::operator=(const Scalar& value) ."] + #[link_name = "\u{1}_ZN2cv3MatC1ERKSt6vectorIiSaIiEEiRKNS_7Scalar_IdEE"] + pub fn cv_Mat_Mat8( + this: *mut cv_Mat, + sizes: *const [u64; 3usize], + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param m Array that (as a whole or partly) is assigned to the constructed matrix. No data is copied"] + #[doc = "by these constructors. Instead, the header pointing to m data or its sub-array is constructed and"] + #[doc = "associated with it. The reference counter, if any, is incremented. So, when you modify the matrix"] + #[doc = "formed using such a constructor, you also modify the corresponding elements of m . If you want to"] + #[doc = "have an independent copy of the sub-array, use Mat::clone() ."] + #[link_name = "\u{1}_ZN2cv3MatC1ERKS0_"] + pub fn cv_Mat_Mat9(this: *mut cv_Mat, m: *const cv_Mat); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param rows Number of rows in a 2D array."] + #[doc = "@param cols Number of columns in a 2D array."] + #[doc = "@param type Array type. Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[doc = "@param data Pointer to the user data. Matrix constructors that take data and step parameters do not"] + #[doc = "allocate matrix data. Instead, they just initialize the matrix header that points to the specified"] + #[doc = "data, which means that no data is copied. This operation is very efficient and can be used to"] + #[doc = "process external data using OpenCV functions. The external data is not automatically deallocated, so"] + #[doc = "you should take care of it."] + #[doc = "@param step Number of bytes each matrix row occupies. 
The value should include the padding bytes at"] + #[doc = "the end of each row, if any. If the parameter is missing (set to AUTO_STEP ), no padding is assumed"] + #[doc = "and the actual step is calculated as cols*elemSize(). See Mat::elemSize."] + #[link_name = "\u{1}_ZN2cv3MatC1EiiiPvm"] + pub fn cv_Mat_Mat10( + this: *mut cv_Mat, + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + data: *mut ::std::os::raw::c_void, + step: size_t, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param size 2D array size: Size(cols, rows) . In the Size() constructor, the number of rows and the"] + #[doc = "number of columns go in the reverse order."] + #[doc = "@param type Array type. Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[doc = "@param data Pointer to the user data. Matrix constructors that take data and step parameters do not"] + #[doc = "allocate matrix data. Instead, they just initialize the matrix header that points to the specified"] + #[doc = "data, which means that no data is copied. This operation is very efficient and can be used to"] + #[doc = "process external data using OpenCV functions. The external data is not automatically deallocated, so"] + #[doc = "you should take care of it."] + #[doc = "@param step Number of bytes each matrix row occupies. The value should include the padding bytes at"] + #[doc = "the end of each row, if any. If the parameter is missing (set to AUTO_STEP ), no padding is assumed"] + #[doc = "and the actual step is calculated as cols*elemSize(). See Mat::elemSize."] + #[link_name = "\u{1}_ZN2cv3MatC1ENS_5Size_IiEEiPvm"] + pub fn cv_Mat_Mat11( + this: *mut cv_Mat, + size: cv_Size, + type_: ::std::os::raw::c_int, + data: *mut ::std::os::raw::c_void, + step: size_t, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param ndims Array dimensionality."] + #[doc = "@param sizes Array of integers specifying an n-dimensional array shape."] + #[doc = "@param type Array type. Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[doc = "@param data Pointer to the user data. Matrix constructors that take data and step parameters do not"] + #[doc = "allocate matrix data. Instead, they just initialize the matrix header that points to the specified"] + #[doc = "data, which means that no data is copied. This operation is very efficient and can be used to"] + #[doc = "process external data using OpenCV functions. The external data is not automatically deallocated, so"] + #[doc = "you should take care of it."] + #[doc = "@param steps Array of ndims-1 steps in case of a multi-dimensional array (the last step is always"] + #[doc = "set to the element size). If not specified, the matrix is assumed to be continuous."] + #[link_name = "\u{1}_ZN2cv3MatC1EiPKiiPvPKm"] + pub fn cv_Mat_Mat12( + this: *mut cv_Mat, + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + data: *mut ::std::os::raw::c_void, + steps: *const size_t, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param sizes Array of integers specifying an n-dimensional array shape."] + #[doc = "@param type Array type. 
Use CV_8UC1, ..., CV_64FC4 to create 1-4 channel matrices, or"] + #[doc = "CV_8UC(n), ..., CV_64FC(n) to create multi-channel (up to CV_CN_MAX channels) matrices."] + #[doc = "@param data Pointer to the user data. Matrix constructors that take data and step parameters do not"] + #[doc = "allocate matrix data. Instead, they just initialize the matrix header that points to the specified"] + #[doc = "data, which means that no data is copied. This operation is very efficient and can be used to"] + #[doc = "process external data using OpenCV functions. The external data is not automatically deallocated, so"] + #[doc = "you should take care of it."] + #[doc = "@param steps Array of ndims-1 steps in case of a multi-dimensional array (the last step is always"] + #[doc = "set to the element size). If not specified, the matrix is assumed to be continuous."] + #[link_name = "\u{1}_ZN2cv3MatC1ERKSt6vectorIiSaIiEEiPvPKm"] + pub fn cv_Mat_Mat13( + this: *mut cv_Mat, + sizes: *const [u64; 3usize], + type_: ::std::os::raw::c_int, + data: *mut ::std::os::raw::c_void, + steps: *const size_t, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param m Array that (as a whole or partly) is assigned to the constructed matrix. No data is copied"] + #[doc = "by these constructors. Instead, the header pointing to m data or its sub-array is constructed and"] + #[doc = "associated with it. The reference counter, if any, is incremented. So, when you modify the matrix"] + #[doc = "formed using such a constructor, you also modify the corresponding elements of m . If you want to"] + #[doc = "have an independent copy of the sub-array, use Mat::clone() ."] + #[doc = "@param rowRange Range of the m rows to take. As usual, the range start is inclusive and the range"] + #[doc = "end is exclusive. Use Range::all() to take all the rows."] + #[doc = "@param colRange Range of the m columns to take. Use Range::all() to take all the columns."] + #[link_name = "\u{1}_ZN2cv3MatC1ERKS0_RKNS_5RangeES5_"] + pub fn cv_Mat_Mat14( + this: *mut cv_Mat, + m: *const cv_Mat, + rowRange: *const cv_Range, + colRange: *const cv_Range, + ); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param m Array that (as a whole or partly) is assigned to the constructed matrix. No data is copied"] + #[doc = "by these constructors. Instead, the header pointing to m data or its sub-array is constructed and"] + #[doc = "associated with it. The reference counter, if any, is incremented. So, when you modify the matrix"] + #[doc = "formed using such a constructor, you also modify the corresponding elements of m . If you want to"] + #[doc = "have an independent copy of the sub-array, use Mat::clone() ."] + #[doc = "@param roi Region of interest."] + #[link_name = "\u{1}_ZN2cv3MatC1ERKS0_RKNS_5Rect_IiEE"] + pub fn cv_Mat_Mat15(this: *mut cv_Mat, m: *const cv_Mat, roi: *const cv_Rect); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param m Array that (as a whole or partly) is assigned to the constructed matrix. No data is copied"] + #[doc = "by these constructors. Instead, the header pointing to m data or its sub-array is constructed and"] + #[doc = "associated with it. The reference counter, if any, is incremented. So, when you modify the matrix"] + #[doc = "formed using such a constructor, you also modify the corresponding elements of m . 
If you want to"] + #[doc = "have an independent copy of the sub-array, use Mat::clone() ."] + #[doc = "@param ranges Array of selected ranges of m along each dimensionality."] + #[link_name = "\u{1}_ZN2cv3MatC1ERKS0_PKNS_5RangeE"] + pub fn cv_Mat_Mat16(this: *mut cv_Mat, m: *const cv_Mat, ranges: *const cv_Range); +} +extern "C" { + #[doc = " @overload"] + #[doc = "@param m Array that (as a whole or partly) is assigned to the constructed matrix. No data is copied"] + #[doc = "by these constructors. Instead, the header pointing to m data or its sub-array is constructed and"] + #[doc = "associated with it. The reference counter, if any, is incremented. So, when you modify the matrix"] + #[doc = "formed using such a constructor, you also modify the corresponding elements of m . If you want to"] + #[doc = "have an independent copy of the sub-array, use Mat::clone() ."] + #[doc = "@param ranges Array of selected ranges of m along each dimensionality."] + #[link_name = "\u{1}_ZN2cv3MatC1ERKS0_RKSt6vectorINS_5RangeESaIS4_EE"] + pub fn cv_Mat_Mat17(this: *mut cv_Mat, m: *const cv_Mat, ranges: *const [u64; 3usize]); +} +extern "C" { + #[doc = "! download data from GpuMat"] + #[link_name = "\u{1}_ZN2cv3MatC1ERKNS_4cuda6GpuMatE"] + pub fn cv_Mat_Mat18(this: *mut cv_Mat, m: *const cv_cuda_GpuMat); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv3MatC1EOS0_"] + pub fn cv_Mat_Mat19(this: *mut cv_Mat, m: *mut cv_Mat); +} +extern "C" { + #[doc = "! destructor - calls release()"] + #[link_name = "\u{1}_ZN2cv3MatD1Ev"] + pub fn cv_Mat_Mat_destructor(this: *mut cv_Mat); +} +impl cv_Mat { + #[inline] + pub unsafe fn getUMat( + &self, + accessFlags: cv_AccessFlag, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat { + cv_Mat_getUMat(self, accessFlags, usageFlags) + } + #[inline] + pub unsafe fn row(&self, y: ::std::os::raw::c_int) -> cv_Mat { + cv_Mat_row(self, y) + } + #[inline] + pub unsafe fn col(&self, x: ::std::os::raw::c_int) -> cv_Mat { + cv_Mat_col(self, x) + } + #[inline] + pub unsafe fn rowRange( + &self, + startrow: ::std::os::raw::c_int, + endrow: ::std::os::raw::c_int, + ) -> cv_Mat { + cv_Mat_rowRange(self, startrow, endrow) + } + #[inline] + pub unsafe fn rowRange1(&self, r: *const cv_Range) -> cv_Mat { + cv_Mat_rowRange1(self, r) + } + #[inline] + pub unsafe fn colRange( + &self, + startcol: ::std::os::raw::c_int, + endcol: ::std::os::raw::c_int, + ) -> cv_Mat { + cv_Mat_colRange(self, startcol, endcol) + } + #[inline] + pub unsafe fn colRange1(&self, r: *const cv_Range) -> cv_Mat { + cv_Mat_colRange1(self, r) + } + #[inline] + pub unsafe fn diag(&self, d: ::std::os::raw::c_int) -> cv_Mat { + cv_Mat_diag(self, d) + } + #[inline] + pub unsafe fn diag1(d: *const cv_Mat) -> cv_Mat { + cv_Mat_diag1(d) + } + #[inline] + pub unsafe fn clone(&self) -> cv_Mat { + cv_Mat_clone(self) + } + #[inline] + pub unsafe fn copyTo(&self, m: cv_OutputArray) { + cv_Mat_copyTo(self, m) + } + #[inline] + pub unsafe fn copyTo1(&self, m: cv_OutputArray, mask: cv_InputArray) { + cv_Mat_copyTo1(self, m, mask) + } + #[inline] + pub unsafe fn convertTo( + &self, + m: cv_OutputArray, + rtype: ::std::os::raw::c_int, + alpha: f64, + beta: f64, + ) { + cv_Mat_convertTo(self, m, rtype, alpha, beta) + } + #[inline] + pub unsafe fn assignTo(&self, m: *mut cv_Mat, type_: ::std::os::raw::c_int) { + cv_Mat_assignTo(self, m, type_) + } + #[inline] + pub unsafe fn setTo(&mut self, value: cv_InputArray, mask: cv_InputArray) -> *mut cv_Mat { + cv_Mat_setTo(self, value, mask) + } + #[inline] + pub unsafe fn reshape(&self, cn: 
::std::os::raw::c_int, rows: ::std::os::raw::c_int) -> cv_Mat { + cv_Mat_reshape(self, cn, rows) + } + #[inline] + pub unsafe fn reshape1( + &self, + cn: ::std::os::raw::c_int, + newndims: ::std::os::raw::c_int, + newsz: *const ::std::os::raw::c_int, + ) -> cv_Mat { + cv_Mat_reshape1(self, cn, newndims, newsz) + } + #[inline] + pub unsafe fn reshape2( + &self, + cn: ::std::os::raw::c_int, + newshape: *const [u64; 3usize], + ) -> cv_Mat { + cv_Mat_reshape2(self, cn, newshape) + } + #[inline] + pub unsafe fn t(&self) -> cv_MatExpr { + cv_Mat_t(self) + } + #[inline] + pub unsafe fn inv(&self, method: ::std::os::raw::c_int) -> cv_MatExpr { + cv_Mat_inv(self, method) + } + #[inline] + pub unsafe fn mul(&self, m: cv_InputArray, scale: f64) -> cv_MatExpr { + cv_Mat_mul(self, m, scale) + } + #[inline] + pub unsafe fn cross(&self, m: cv_InputArray) -> cv_Mat { + cv_Mat_cross(self, m) + } + #[inline] + pub unsafe fn dot(&self, m: cv_InputArray) -> f64 { + cv_Mat_dot(self, m) + } + #[inline] + pub unsafe fn zeros( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr { + cv_Mat_zeros(rows, cols, type_) + } + #[inline] + pub unsafe fn zeros1(size: cv_Size, type_: ::std::os::raw::c_int) -> cv_MatExpr { + cv_Mat_zeros1(size, type_) + } + #[inline] + pub unsafe fn zeros2( + ndims: ::std::os::raw::c_int, + sz: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr { + cv_Mat_zeros2(ndims, sz, type_) + } + #[inline] + pub unsafe fn ones( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr { + cv_Mat_ones(rows, cols, type_) + } + #[inline] + pub unsafe fn ones1(size: cv_Size, type_: ::std::os::raw::c_int) -> cv_MatExpr { + cv_Mat_ones1(size, type_) + } + #[inline] + pub unsafe fn ones2( + ndims: ::std::os::raw::c_int, + sz: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr { + cv_Mat_ones2(ndims, sz, type_) + } + #[inline] + pub unsafe fn eye( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> cv_MatExpr { + cv_Mat_eye(rows, cols, type_) + } + #[inline] + pub unsafe fn eye1(size: cv_Size, type_: ::std::os::raw::c_int) -> cv_MatExpr { + cv_Mat_eye1(size, type_) + } + #[inline] + pub unsafe fn create( + &mut self, + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) { + cv_Mat_create(self, rows, cols, type_) + } + #[inline] + pub unsafe fn create1(&mut self, size: cv_Size, type_: ::std::os::raw::c_int) { + cv_Mat_create1(self, size, type_) + } + #[inline] + pub unsafe fn create2( + &mut self, + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) { + cv_Mat_create2(self, ndims, sizes, type_) + } + #[inline] + pub unsafe fn create3(&mut self, sizes: *const [u64; 3usize], type_: ::std::os::raw::c_int) { + cv_Mat_create3(self, sizes, type_) + } + #[inline] + pub unsafe fn addref(&mut self) { + cv_Mat_addref(self) + } + #[inline] + pub unsafe fn release(&mut self) { + cv_Mat_release(self) + } + #[inline] + pub unsafe fn deallocate(&mut self) { + cv_Mat_deallocate(self) + } + #[inline] + pub unsafe fn copySize(&mut self, m: *const cv_Mat) { + cv_Mat_copySize(self, m) + } + #[inline] + pub unsafe fn reserve(&mut self, sz: size_t) { + cv_Mat_reserve(self, sz) + } + #[inline] + pub unsafe fn reserveBuffer(&mut self, sz: size_t) { + cv_Mat_reserveBuffer(self, sz) + } + #[inline] + pub unsafe fn 
resize(&mut self, sz: size_t) { + cv_Mat_resize(self, sz) + } + #[inline] + pub unsafe fn resize1(&mut self, sz: size_t, s: *const cv_Scalar) { + cv_Mat_resize1(self, sz, s) + } + #[inline] + pub unsafe fn push_back_(&mut self, elem: *const ::std::os::raw::c_void) { + cv_Mat_push_back_(self, elem) + } + #[inline] + pub unsafe fn push_back(&mut self, m: *const cv_Mat) { + cv_Mat_push_back(self, m) + } + #[inline] + pub unsafe fn pop_back(&mut self, nelems: size_t) { + cv_Mat_pop_back(self, nelems) + } + #[inline] + pub unsafe fn locateROI(&self, wholeSize: *mut cv_Size, ofs: *mut cv_Point) { + cv_Mat_locateROI(self, wholeSize, ofs) + } + #[inline] + pub unsafe fn adjustROI( + &mut self, + dtop: ::std::os::raw::c_int, + dbottom: ::std::os::raw::c_int, + dleft: ::std::os::raw::c_int, + dright: ::std::os::raw::c_int, + ) -> *mut cv_Mat { + cv_Mat_adjustROI(self, dtop, dbottom, dleft, dright) + } + #[inline] + pub unsafe fn isContinuous(&self) -> bool { + cv_Mat_isContinuous(self) + } + #[inline] + pub unsafe fn isSubmatrix(&self) -> bool { + cv_Mat_isSubmatrix(self) + } + #[inline] + pub unsafe fn elemSize(&self) -> size_t { + cv_Mat_elemSize(self) + } + #[inline] + pub unsafe fn elemSize1(&self) -> size_t { + cv_Mat_elemSize1(self) + } + #[inline] + pub unsafe fn type_(&self) -> ::std::os::raw::c_int { + cv_Mat_type(self) + } + #[inline] + pub unsafe fn depth(&self) -> ::std::os::raw::c_int { + cv_Mat_depth(self) + } + #[inline] + pub unsafe fn channels(&self) -> ::std::os::raw::c_int { + cv_Mat_channels(self) + } + #[inline] + pub unsafe fn step1(&self, i: ::std::os::raw::c_int) -> size_t { + cv_Mat_step1(self, i) + } + #[inline] + pub unsafe fn empty(&self) -> bool { + cv_Mat_empty(self) + } + #[inline] + pub unsafe fn total(&self) -> size_t { + cv_Mat_total(self) + } + #[inline] + pub unsafe fn total1( + &self, + startDim: ::std::os::raw::c_int, + endDim: ::std::os::raw::c_int, + ) -> size_t { + cv_Mat_total1(self, startDim, endDim) + } + #[inline] + pub unsafe fn checkVector( + &self, + elemChannels: ::std::os::raw::c_int, + depth: ::std::os::raw::c_int, + requireContinuous: bool, + ) -> ::std::os::raw::c_int { + cv_Mat_checkVector(self, elemChannels, depth, requireContinuous) + } + #[inline] + pub unsafe fn ptr(&mut self, i0: ::std::os::raw::c_int) -> *mut uchar { + cv_Mat_ptr(self, i0) + } + #[inline] + pub unsafe fn ptr1(&self, i0: ::std::os::raw::c_int) -> *const uchar { + cv_Mat_ptr1(self, i0) + } + #[inline] + pub unsafe fn ptr2( + &mut self, + row: ::std::os::raw::c_int, + col: ::std::os::raw::c_int, + ) -> *mut uchar { + cv_Mat_ptr2(self, row, col) + } + #[inline] + pub unsafe fn ptr3( + &self, + row: ::std::os::raw::c_int, + col: ::std::os::raw::c_int, + ) -> *const uchar { + cv_Mat_ptr3(self, row, col) + } + #[inline] + pub unsafe fn ptr4( + &mut self, + i0: ::std::os::raw::c_int, + i1: ::std::os::raw::c_int, + i2: ::std::os::raw::c_int, + ) -> *mut uchar { + cv_Mat_ptr4(self, i0, i1, i2) + } + #[inline] + pub unsafe fn ptr5( + &self, + i0: ::std::os::raw::c_int, + i1: ::std::os::raw::c_int, + i2: ::std::os::raw::c_int, + ) -> *const uchar { + cv_Mat_ptr5(self, i0, i1, i2) + } + #[inline] + pub unsafe fn ptr6(&mut self, idx: *const ::std::os::raw::c_int) -> *mut uchar { + cv_Mat_ptr6(self, idx) + } + #[inline] + pub unsafe fn ptr7(&self, idx: *const ::std::os::raw::c_int) -> *const uchar { + cv_Mat_ptr7(self, idx) + } + #[inline] + pub unsafe fn getStdAllocator() -> *mut cv_MatAllocator { + cv_Mat_getStdAllocator() + } + #[inline] + pub unsafe fn getDefaultAllocator() -> *mut 
cv_MatAllocator { + cv_Mat_getDefaultAllocator() + } + #[inline] + pub unsafe fn setDefaultAllocator(allocator: *mut cv_MatAllocator) { + cv_Mat_setDefaultAllocator(allocator) + } + #[inline] + pub unsafe fn updateContinuityFlag(&mut self) { + cv_Mat_updateContinuityFlag(self) + } + #[inline] + pub unsafe fn new() -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat(__bindgen_tmp.as_mut_ptr()); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new1( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat1(__bindgen_tmp.as_mut_ptr(), rows, cols, type_); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new2(size: cv_Size, type_: ::std::os::raw::c_int) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat2(__bindgen_tmp.as_mut_ptr(), size, type_); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new3( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat3(__bindgen_tmp.as_mut_ptr(), rows, cols, type_, s); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new4(size: cv_Size, type_: ::std::os::raw::c_int, s: *const cv_Scalar) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat4(__bindgen_tmp.as_mut_ptr(), size, type_, s); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new5( + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat5(__bindgen_tmp.as_mut_ptr(), ndims, sizes, type_); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new6(sizes: *const [u64; 3usize], type_: ::std::os::raw::c_int) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat6(__bindgen_tmp.as_mut_ptr(), sizes, type_); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new7( + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat7(__bindgen_tmp.as_mut_ptr(), ndims, sizes, type_, s); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new8( + sizes: *const [u64; 3usize], + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat8(__bindgen_tmp.as_mut_ptr(), sizes, type_, s); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new9(m: *const cv_Mat) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat9(__bindgen_tmp.as_mut_ptr(), m); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new10( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + data: *mut ::std::os::raw::c_void, + step: size_t, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_Mat_Mat10(__bindgen_tmp.as_mut_ptr(), rows, cols, type_, data, step); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new11( + size: cv_Size, + type_: ::std::os::raw::c_int, + data: *mut ::std::os::raw::c_void, + step: size_t, + ) -> Self { + let mut __bindgen_tmp = 
::std::mem::MaybeUninit::uninit();
+        cv_Mat_Mat11(__bindgen_tmp.as_mut_ptr(), size, type_, data, step);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn new12(
+        ndims: ::std::os::raw::c_int,
+        sizes: *const ::std::os::raw::c_int,
+        type_: ::std::os::raw::c_int,
+        data: *mut ::std::os::raw::c_void,
+        steps: *const size_t,
+    ) -> Self {
+        let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit();
+        cv_Mat_Mat12(__bindgen_tmp.as_mut_ptr(), ndims, sizes, type_, data, steps);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn new13(
+        sizes: *const [u64; 3usize],
+        type_: ::std::os::raw::c_int,
+        data: *mut ::std::os::raw::c_void,
+        steps: *const size_t,
+    ) -> Self {
+        let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit();
+        cv_Mat_Mat13(__bindgen_tmp.as_mut_ptr(), sizes, type_, data, steps);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn new14(
+        m: *const cv_Mat,
+        rowRange: *const cv_Range,
+        colRange: *const cv_Range,
+    ) -> Self {
+        let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit();
+        cv_Mat_Mat14(__bindgen_tmp.as_mut_ptr(), m, rowRange, colRange);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn new15(m: *const cv_Mat, roi: *const cv_Rect) -> Self {
+        let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit();
+        cv_Mat_Mat15(__bindgen_tmp.as_mut_ptr(), m, roi);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn new16(m: *const cv_Mat, ranges: *const cv_Range) -> Self {
+        let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit();
+        cv_Mat_Mat16(__bindgen_tmp.as_mut_ptr(), m, ranges);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn new17(m: *const cv_Mat, ranges: *const [u64; 3usize]) -> Self {
+        let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit();
+        cv_Mat_Mat17(__bindgen_tmp.as_mut_ptr(), m, ranges);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn new18(m: *const cv_cuda_GpuMat) -> Self {
+        let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit();
+        cv_Mat_Mat18(__bindgen_tmp.as_mut_ptr(), m);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn new19(m: *mut cv_Mat) -> Self {
+        let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit();
+        cv_Mat_Mat19(__bindgen_tmp.as_mut_ptr(), m);
+        __bindgen_tmp.assume_init()
+    }
+    #[inline]
+    pub unsafe fn destruct(&mut self) {
+        cv_Mat_Mat_destructor(self)
+    }
+}
+#[doc = " @todo document"]
+#[repr(C)]
+#[repr(align(8))]
+#[derive(Debug, Copy, Clone)]
+pub struct cv_UMat {
+    pub _bindgen_opaque_blob: [u64; 10usize],
+}
+pub const cv_UMat_MAGIC_VAL: ::std::os::raw::c_uint = 1124007936;
+pub const cv_UMat_AUTO_STEP: ::std::os::raw::c_uint = 0;
+pub const cv_UMat_CONTINUOUS_FLAG: ::std::os::raw::c_uint = 16384;
+pub const cv_UMat_SUBMATRIX_FLAG: ::std::os::raw::c_uint = 32768;
+pub type cv_UMat__bindgen_ty_1 = ::std::os::raw::c_uint;
+pub const cv_UMat_MAGIC_MASK: ::std::os::raw::c_uint = 4294901760;
+pub const cv_UMat_TYPE_MASK: ::std::os::raw::c_uint = 4095;
+pub const cv_UMat_DEPTH_MASK: ::std::os::raw::c_uint = 7;
+pub type cv_UMat__bindgen_ty_2 = ::std::os::raw::c_uint;
+#[test]
+fn bindgen_test_layout_cv_UMat() {
+    assert_eq!(
+        ::std::mem::size_of::<cv_UMat>(),
+        80usize,
+        concat!("Size of: ", stringify!(cv_UMat))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<cv_UMat>(),
+        8usize,
+        concat!("Alignment of ", stringify!(cv_UMat))
+    );
+}
+extern "C" {
+    #[link_name = "\u{1}_ZNK2cv4UMat6getMatENS_10AccessFlagE"]
+    pub fn cv_UMat_getMat(this: *const cv_UMat, flags: cv_AccessFlag) -> cv_Mat;
+}
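+// Editor's note: a minimal lifecycle sketch, not bindgen output. The raw
+// `new*` constructors above build a cv_Mat through MaybeUninit, and nothing
+// frees it on the Rust side, so every constructed value must be paired with
+// an explicit `destruct()` (cv::Mat::~Mat(), which calls release()). The
+// constant `16` is OpenCV's CV_8UC3 type value, hard-coded because these
+// bindings do not export the CV_* constants.
+#[allow(dead_code)]
+unsafe fn example_mat_lifecycle() {
+    let mut m = cv_Mat::new1(480, 640, 16); // 480x640, 8-bit, 3 channels
+    debug_assert!(!m.empty());
+    debug_assert_eq!(m.channels(), 3);
+    m.destruct();
+}
+extern "C" {
+    #[doc = "! 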
returns a new matrix header for the specified row"] + #[link_name = "\u{1}_ZNK2cv4UMat3rowEi"] + pub fn cv_UMat_row(this: *const cv_UMat, y: ::std::os::raw::c_int) -> cv_UMat; +} +extern "C" { + #[doc = "! returns a new matrix header for the specified column"] + #[link_name = "\u{1}_ZNK2cv4UMat3colEi"] + pub fn cv_UMat_col(this: *const cv_UMat, x: ::std::os::raw::c_int) -> cv_UMat; +} +extern "C" { + #[doc = "! ... for the specified row span"] + #[link_name = "\u{1}_ZNK2cv4UMat8rowRangeEii"] + pub fn cv_UMat_rowRange( + this: *const cv_UMat, + startrow: ::std::os::raw::c_int, + endrow: ::std::os::raw::c_int, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv4UMat8rowRangeERKNS_5RangeE"] + pub fn cv_UMat_rowRange1(this: *const cv_UMat, r: *const cv_Range) -> cv_UMat; +} +extern "C" { + #[doc = "! ... for the specified column span"] + #[link_name = "\u{1}_ZNK2cv4UMat8colRangeEii"] + pub fn cv_UMat_colRange( + this: *const cv_UMat, + startcol: ::std::os::raw::c_int, + endcol: ::std::os::raw::c_int, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv4UMat8colRangeERKNS_5RangeE"] + pub fn cv_UMat_colRange1(this: *const cv_UMat, r: *const cv_Range) -> cv_UMat; +} +extern "C" { + #[doc = "! ... for the specified diagonal"] + #[doc = "! (d=0 - the main diagonal,"] + #[doc = "! >0 - a diagonal from the upper half,"] + #[doc = "! <0 - a diagonal from the lower half)"] + #[link_name = "\u{1}_ZNK2cv4UMat4diagEi"] + pub fn cv_UMat_diag(this: *const cv_UMat, d: ::std::os::raw::c_int) -> cv_UMat; +} +extern "C" { + #[doc = "! constructs a square diagonal matrix which main diagonal is vector \"d\""] + #[link_name = "\u{1}_ZN2cv4UMat4diagERKS0_NS_14UMatUsageFlagsE"] + pub fn cv_UMat_diag1(d: *const cv_UMat, usageFlags: cv_UMatUsageFlags) -> cv_UMat; +} +extern "C" { + #[doc = "! returns deep copy of the matrix, i.e. the data is copied"] + #[link_name = "\u{1}_ZNK2cv4UMat5cloneEv"] + pub fn cv_UMat_clone(this: *const cv_UMat) -> cv_UMat; +} +extern "C" { + #[doc = "! copies the matrix content to \"m\"."] + #[link_name = "\u{1}_ZNK2cv4UMat6copyToERKNS_12_OutputArrayE"] + pub fn cv_UMat_copyTo(this: *const cv_UMat, m: cv_OutputArray); +} +extern "C" { + #[doc = "! copies those matrix elements to \"m\" that are marked with non-zero mask elements."] + #[link_name = "\u{1}_ZNK2cv4UMat6copyToERKNS_12_OutputArrayERKNS_11_InputArrayE"] + pub fn cv_UMat_copyTo1(this: *const cv_UMat, m: cv_OutputArray, mask: cv_InputArray); +} +extern "C" { + #[doc = "! converts matrix to another datatype with optional scaling. See cvConvertScale."] + #[link_name = "\u{1}_ZNK2cv4UMat9convertToERKNS_12_OutputArrayEidd"] + pub fn cv_UMat_convertTo( + this: *const cv_UMat, + m: cv_OutputArray, + rtype: ::std::os::raw::c_int, + alpha: f64, + beta: f64, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv4UMat8assignToERS0_i"] + pub fn cv_UMat_assignTo(this: *const cv_UMat, m: *mut cv_UMat, type_: ::std::os::raw::c_int); +} +extern "C" { + #[doc = "! sets some of the matrix elements to s, according to the mask"] + #[link_name = "\u{1}_ZN2cv4UMat5setToERKNS_11_InputArrayES3_"] + pub fn cv_UMat_setTo( + this: *mut cv_UMat, + value: cv_InputArray, + mask: cv_InputArray, + ) -> *mut cv_UMat; +} +extern "C" { + #[doc = "! 
creates alternative matrix header for the same data, with different"] + #[link_name = "\u{1}_ZNK2cv4UMat7reshapeEii"] + pub fn cv_UMat_reshape( + this: *const cv_UMat, + cn: ::std::os::raw::c_int, + rows: ::std::os::raw::c_int, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv4UMat7reshapeEiiPKi"] + pub fn cv_UMat_reshape1( + this: *const cv_UMat, + cn: ::std::os::raw::c_int, + newndims: ::std::os::raw::c_int, + newsz: *const ::std::os::raw::c_int, + ) -> cv_UMat; +} +extern "C" { + #[doc = "! matrix transposition by means of matrix expressions"] + #[link_name = "\u{1}_ZNK2cv4UMat1tEv"] + pub fn cv_UMat_t(this: *const cv_UMat) -> cv_UMat; +} +extern "C" { + #[doc = "! matrix inversion by means of matrix expressions"] + #[link_name = "\u{1}_ZNK2cv4UMat3invEi"] + pub fn cv_UMat_inv(this: *const cv_UMat, method: ::std::os::raw::c_int) -> cv_UMat; +} +extern "C" { + #[doc = "! per-element matrix multiplication by means of matrix expressions"] + #[link_name = "\u{1}_ZNK2cv4UMat3mulERKNS_11_InputArrayEd"] + pub fn cv_UMat_mul(this: *const cv_UMat, m: cv_InputArray, scale: f64) -> cv_UMat; +} +extern "C" { + #[doc = "! computes dot-product"] + #[link_name = "\u{1}_ZNK2cv4UMat3dotERKNS_11_InputArrayE"] + pub fn cv_UMat_dot(this: *const cv_UMat, m: cv_InputArray) -> f64; +} +extern "C" { + #[doc = "! Matlab-style matrix initialization"] + #[link_name = "\u{1}_ZN2cv4UMat5zerosEiiiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_zeros( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat5zerosENS_5Size_IiEEiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_zeros1( + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat5zerosEiPKiiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_zeros2( + ndims: ::std::os::raw::c_int, + sz: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat4onesEiiiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_ones( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat4onesENS_5Size_IiEEiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_ones1( + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat4onesEiPKiiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_ones2( + ndims: ::std::os::raw::c_int, + sz: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat3eyeEiiiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_eye( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat3eyeENS_5Size_IiEEiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_eye1( + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat; +} +extern "C" { + #[doc = "! 
allocates new matrix data unless the matrix already has specified size and type."] + #[link_name = "\u{1}_ZN2cv4UMat6createEiiiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_create( + this: *mut cv_UMat, + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat6createENS_5Size_IiEEiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_create1( + this: *mut cv_UMat, + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat6createEiPKiiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_create2( + this: *mut cv_UMat, + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMat6createERKSt6vectorIiSaIiEEiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_create3( + this: *mut cv_UMat, + sizes: *const [u64; 3usize], + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[doc = "! increases the reference counter; use with care to avoid memleaks"] + #[link_name = "\u{1}_ZN2cv4UMat6addrefEv"] + pub fn cv_UMat_addref(this: *mut cv_UMat); +} +extern "C" { + #[doc = "! decreases reference counter;"] + #[link_name = "\u{1}_ZN2cv4UMat7releaseEv"] + pub fn cv_UMat_release(this: *mut cv_UMat); +} +extern "C" { + #[doc = "! deallocates the matrix data"] + #[link_name = "\u{1}_ZN2cv4UMat10deallocateEv"] + pub fn cv_UMat_deallocate(this: *mut cv_UMat); +} +extern "C" { + #[doc = "! internal use function; properly re-allocates _size, _step arrays"] + #[link_name = "\u{1}_ZN2cv4UMat8copySizeERKS0_"] + pub fn cv_UMat_copySize(this: *mut cv_UMat, m: *const cv_UMat); +} +extern "C" { + #[doc = "! locates matrix header within a parent matrix. See below"] + #[link_name = "\u{1}_ZNK2cv4UMat9locateROIERNS_5Size_IiEERNS_6Point_IiEE"] + pub fn cv_UMat_locateROI(this: *const cv_UMat, wholeSize: *mut cv_Size, ofs: *mut cv_Point); +} +extern "C" { + #[doc = "! moves/resizes the current matrix ROI inside the parent matrix."] + #[link_name = "\u{1}_ZN2cv4UMat9adjustROIEiiii"] + pub fn cv_UMat_adjustROI( + this: *mut cv_UMat, + dtop: ::std::os::raw::c_int, + dbottom: ::std::os::raw::c_int, + dleft: ::std::os::raw::c_int, + dright: ::std::os::raw::c_int, + ) -> *mut cv_UMat; +} +extern "C" { + #[doc = "! returns true iff the matrix data is continuous"] + #[link_name = "\u{1}_ZNK2cv4UMat12isContinuousEv"] + pub fn cv_UMat_isContinuous(this: *const cv_UMat) -> bool; +} +extern "C" { + #[doc = "! returns true if the matrix is a submatrix of another matrix"] + #[link_name = "\u{1}_ZNK2cv4UMat11isSubmatrixEv"] + pub fn cv_UMat_isSubmatrix(this: *const cv_UMat) -> bool; +} +extern "C" { + #[doc = "! returns element size in bytes,"] + #[link_name = "\u{1}_ZNK2cv4UMat8elemSizeEv"] + pub fn cv_UMat_elemSize(this: *const cv_UMat) -> size_t; +} +extern "C" { + #[doc = "! returns the size of element channel in bytes."] + #[link_name = "\u{1}_ZNK2cv4UMat9elemSize1Ev"] + pub fn cv_UMat_elemSize1(this: *const cv_UMat) -> size_t; +} +extern "C" { + #[doc = "! returns element type, similar to CV_MAT_TYPE(cvmat->type)"] + #[link_name = "\u{1}_ZNK2cv4UMat4typeEv"] + pub fn cv_UMat_type(this: *const cv_UMat) -> ::std::os::raw::c_int; +} +extern "C" { + #[doc = "! 
returns element type, similar to CV_MAT_DEPTH(cvmat->type)"] + #[link_name = "\u{1}_ZNK2cv4UMat5depthEv"] + pub fn cv_UMat_depth(this: *const cv_UMat) -> ::std::os::raw::c_int; +} +extern "C" { + #[doc = "! returns element type, similar to CV_MAT_CN(cvmat->type)"] + #[link_name = "\u{1}_ZNK2cv4UMat8channelsEv"] + pub fn cv_UMat_channels(this: *const cv_UMat) -> ::std::os::raw::c_int; +} +extern "C" { + #[doc = "! returns step/elemSize1()"] + #[link_name = "\u{1}_ZNK2cv4UMat5step1Ei"] + pub fn cv_UMat_step1(this: *const cv_UMat, i: ::std::os::raw::c_int) -> size_t; +} +extern "C" { + #[doc = "! returns true if matrix data is NULL"] + #[link_name = "\u{1}_ZNK2cv4UMat5emptyEv"] + pub fn cv_UMat_empty(this: *const cv_UMat) -> bool; +} +extern "C" { + #[doc = "! returns the total number of matrix elements"] + #[link_name = "\u{1}_ZNK2cv4UMat5totalEv"] + pub fn cv_UMat_total(this: *const cv_UMat) -> size_t; +} +extern "C" { + #[doc = "! returns N if the matrix is 1-channel (N x ptdim) or ptdim-channel (1 x N) or (N x 1); negative number otherwise"] + #[link_name = "\u{1}_ZNK2cv4UMat11checkVectorEiib"] + pub fn cv_UMat_checkVector( + this: *const cv_UMat, + elemChannels: ::std::os::raw::c_int, + depth: ::std::os::raw::c_int, + requireContinuous: bool, + ) -> ::std::os::raw::c_int; +} +extern "C" { + #[doc = " Returns the OpenCL buffer handle on which UMat operates on."] + #[doc = "The UMat instance should be kept alive during the use of the handle to prevent the buffer to be"] + #[doc = "returned to the OpenCV buffer pool."] + #[link_name = "\u{1}_ZNK2cv4UMat6handleENS_10AccessFlagE"] + pub fn cv_UMat_handle( + this: *const cv_UMat, + accessFlags: cv_AccessFlag, + ) -> *mut ::std::os::raw::c_void; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv4UMat8ndoffsetEPm"] + pub fn cv_UMat_ndoffset(this: *const cv_UMat, ofs: *mut size_t); +} +extern "C" { + #[doc = "! and the standard allocator"] + #[link_name = "\u{1}_ZN2cv4UMat15getStdAllocatorEv"] + pub fn cv_UMat_getStdAllocator() -> *mut cv_MatAllocator; +} +extern "C" { + #[doc = "! internal use method: updates the continuity flag"] + #[link_name = "\u{1}_ZN2cv4UMat20updateContinuityFlagEv"] + pub fn cv_UMat_updateContinuityFlag(this: *mut cv_UMat); +} +extern "C" { + #[doc = "! default constructor"] + #[link_name = "\u{1}_ZN2cv4UMatC1ENS_14UMatUsageFlagsE"] + pub fn cv_UMat_UMat(this: *mut cv_UMat, usageFlags: cv_UMatUsageFlags); +} +extern "C" { + #[doc = "! constructs 2D matrix of the specified size and type"] + #[link_name = "\u{1}_ZN2cv4UMatC1EiiiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_UMat1( + this: *mut cv_UMat, + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMatC1ENS_5Size_IiEEiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_UMat2( + this: *mut cv_UMat, + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[doc = "! 
constructs 2D matrix and fills it with the specified value _s."] + #[link_name = "\u{1}_ZN2cv4UMatC1EiiiRKNS_7Scalar_IdEENS_14UMatUsageFlagsE"] + pub fn cv_UMat_UMat3( + this: *mut cv_UMat, + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMatC1ENS_5Size_IiEEiRKNS_7Scalar_IdEENS_14UMatUsageFlagsE"] + pub fn cv_UMat_UMat4( + this: *mut cv_UMat, + size: cv_Size, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[doc = "! constructs n-dimensional matrix"] + #[link_name = "\u{1}_ZN2cv4UMatC1EiPKiiNS_14UMatUsageFlagsE"] + pub fn cv_UMat_UMat5( + this: *mut cv_UMat, + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMatC1EiPKiiRKNS_7Scalar_IdEENS_14UMatUsageFlagsE"] + pub fn cv_UMat_UMat6( + this: *mut cv_UMat, + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + usageFlags: cv_UMatUsageFlags, + ); +} +extern "C" { + #[doc = "! copy constructor"] + #[link_name = "\u{1}_ZN2cv4UMatC1ERKS0_"] + pub fn cv_UMat_UMat7(this: *mut cv_UMat, m: *const cv_UMat); +} +extern "C" { + #[doc = "! creates a matrix header for a part of the bigger matrix"] + #[link_name = "\u{1}_ZN2cv4UMatC1ERKS0_RKNS_5RangeES5_"] + pub fn cv_UMat_UMat8( + this: *mut cv_UMat, + m: *const cv_UMat, + rowRange: *const cv_Range, + colRange: *const cv_Range, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMatC1ERKS0_RKNS_5Rect_IiEE"] + pub fn cv_UMat_UMat9(this: *mut cv_UMat, m: *const cv_UMat, roi: *const cv_Rect); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMatC1ERKS0_PKNS_5RangeE"] + pub fn cv_UMat_UMat10(this: *mut cv_UMat, m: *const cv_UMat, ranges: *const cv_Range); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMatC1ERKS0_RKSt6vectorINS_5RangeESaIS4_EE"] + pub fn cv_UMat_UMat11(this: *mut cv_UMat, m: *const cv_UMat, ranges: *const [u64; 3usize]); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv4UMatC1EOS0_"] + pub fn cv_UMat_UMat12(this: *mut cv_UMat, m: *mut cv_UMat); +} +extern "C" { + #[doc = "! 
destructor - calls release()"] + #[link_name = "\u{1}_ZN2cv4UMatD1Ev"] + pub fn cv_UMat_UMat_destructor(this: *mut cv_UMat); +} +impl cv_UMat { + #[inline] + pub unsafe fn getMat(&self, flags: cv_AccessFlag) -> cv_Mat { + cv_UMat_getMat(self, flags) + } + #[inline] + pub unsafe fn row(&self, y: ::std::os::raw::c_int) -> cv_UMat { + cv_UMat_row(self, y) + } + #[inline] + pub unsafe fn col(&self, x: ::std::os::raw::c_int) -> cv_UMat { + cv_UMat_col(self, x) + } + #[inline] + pub unsafe fn rowRange( + &self, + startrow: ::std::os::raw::c_int, + endrow: ::std::os::raw::c_int, + ) -> cv_UMat { + cv_UMat_rowRange(self, startrow, endrow) + } + #[inline] + pub unsafe fn rowRange1(&self, r: *const cv_Range) -> cv_UMat { + cv_UMat_rowRange1(self, r) + } + #[inline] + pub unsafe fn colRange( + &self, + startcol: ::std::os::raw::c_int, + endcol: ::std::os::raw::c_int, + ) -> cv_UMat { + cv_UMat_colRange(self, startcol, endcol) + } + #[inline] + pub unsafe fn colRange1(&self, r: *const cv_Range) -> cv_UMat { + cv_UMat_colRange1(self, r) + } + #[inline] + pub unsafe fn diag(&self, d: ::std::os::raw::c_int) -> cv_UMat { + cv_UMat_diag(self, d) + } + #[inline] + pub unsafe fn diag1(d: *const cv_UMat, usageFlags: cv_UMatUsageFlags) -> cv_UMat { + cv_UMat_diag1(d, usageFlags) + } + #[inline] + pub unsafe fn clone(&self) -> cv_UMat { + cv_UMat_clone(self) + } + #[inline] + pub unsafe fn copyTo(&self, m: cv_OutputArray) { + cv_UMat_copyTo(self, m) + } + #[inline] + pub unsafe fn copyTo1(&self, m: cv_OutputArray, mask: cv_InputArray) { + cv_UMat_copyTo1(self, m, mask) + } + #[inline] + pub unsafe fn convertTo( + &self, + m: cv_OutputArray, + rtype: ::std::os::raw::c_int, + alpha: f64, + beta: f64, + ) { + cv_UMat_convertTo(self, m, rtype, alpha, beta) + } + #[inline] + pub unsafe fn assignTo(&self, m: *mut cv_UMat, type_: ::std::os::raw::c_int) { + cv_UMat_assignTo(self, m, type_) + } + #[inline] + pub unsafe fn setTo(&mut self, value: cv_InputArray, mask: cv_InputArray) -> *mut cv_UMat { + cv_UMat_setTo(self, value, mask) + } + #[inline] + pub unsafe fn reshape( + &self, + cn: ::std::os::raw::c_int, + rows: ::std::os::raw::c_int, + ) -> cv_UMat { + cv_UMat_reshape(self, cn, rows) + } + #[inline] + pub unsafe fn reshape1( + &self, + cn: ::std::os::raw::c_int, + newndims: ::std::os::raw::c_int, + newsz: *const ::std::os::raw::c_int, + ) -> cv_UMat { + cv_UMat_reshape1(self, cn, newndims, newsz) + } + #[inline] + pub unsafe fn t(&self) -> cv_UMat { + cv_UMat_t(self) + } + #[inline] + pub unsafe fn inv(&self, method: ::std::os::raw::c_int) -> cv_UMat { + cv_UMat_inv(self, method) + } + #[inline] + pub unsafe fn mul(&self, m: cv_InputArray, scale: f64) -> cv_UMat { + cv_UMat_mul(self, m, scale) + } + #[inline] + pub unsafe fn dot(&self, m: cv_InputArray) -> f64 { + cv_UMat_dot(self, m) + } + #[inline] + pub unsafe fn zeros( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat { + cv_UMat_zeros(rows, cols, type_, usageFlags) + } + #[inline] + pub unsafe fn zeros1( + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat { + cv_UMat_zeros1(size, type_, usageFlags) + } + #[inline] + pub unsafe fn zeros2( + ndims: ::std::os::raw::c_int, + sz: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat { + cv_UMat_zeros2(ndims, sz, type_, usageFlags) + } + #[inline] + pub unsafe fn ones( + rows: ::std::os::raw::c_int, + cols: 
::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat { + cv_UMat_ones(rows, cols, type_, usageFlags) + } + #[inline] + pub unsafe fn ones1( + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat { + cv_UMat_ones1(size, type_, usageFlags) + } + #[inline] + pub unsafe fn ones2( + ndims: ::std::os::raw::c_int, + sz: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat { + cv_UMat_ones2(ndims, sz, type_, usageFlags) + } + #[inline] + pub unsafe fn eye( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat { + cv_UMat_eye(rows, cols, type_, usageFlags) + } + #[inline] + pub unsafe fn eye1( + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> cv_UMat { + cv_UMat_eye1(size, type_, usageFlags) + } + #[inline] + pub unsafe fn create( + &mut self, + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) { + cv_UMat_create(self, rows, cols, type_, usageFlags) + } + #[inline] + pub unsafe fn create1( + &mut self, + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) { + cv_UMat_create1(self, size, type_, usageFlags) + } + #[inline] + pub unsafe fn create2( + &mut self, + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) { + cv_UMat_create2(self, ndims, sizes, type_, usageFlags) + } + #[inline] + pub unsafe fn create3( + &mut self, + sizes: *const [u64; 3usize], + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) { + cv_UMat_create3(self, sizes, type_, usageFlags) + } + #[inline] + pub unsafe fn addref(&mut self) { + cv_UMat_addref(self) + } + #[inline] + pub unsafe fn release(&mut self) { + cv_UMat_release(self) + } + #[inline] + pub unsafe fn deallocate(&mut self) { + cv_UMat_deallocate(self) + } + #[inline] + pub unsafe fn copySize(&mut self, m: *const cv_UMat) { + cv_UMat_copySize(self, m) + } + #[inline] + pub unsafe fn locateROI(&self, wholeSize: *mut cv_Size, ofs: *mut cv_Point) { + cv_UMat_locateROI(self, wholeSize, ofs) + } + #[inline] + pub unsafe fn adjustROI( + &mut self, + dtop: ::std::os::raw::c_int, + dbottom: ::std::os::raw::c_int, + dleft: ::std::os::raw::c_int, + dright: ::std::os::raw::c_int, + ) -> *mut cv_UMat { + cv_UMat_adjustROI(self, dtop, dbottom, dleft, dright) + } + #[inline] + pub unsafe fn isContinuous(&self) -> bool { + cv_UMat_isContinuous(self) + } + #[inline] + pub unsafe fn isSubmatrix(&self) -> bool { + cv_UMat_isSubmatrix(self) + } + #[inline] + pub unsafe fn elemSize(&self) -> size_t { + cv_UMat_elemSize(self) + } + #[inline] + pub unsafe fn elemSize1(&self) -> size_t { + cv_UMat_elemSize1(self) + } + #[inline] + pub unsafe fn type_(&self) -> ::std::os::raw::c_int { + cv_UMat_type(self) + } + #[inline] + pub unsafe fn depth(&self) -> ::std::os::raw::c_int { + cv_UMat_depth(self) + } + #[inline] + pub unsafe fn channels(&self) -> ::std::os::raw::c_int { + cv_UMat_channels(self) + } + #[inline] + pub unsafe fn step1(&self, i: ::std::os::raw::c_int) -> size_t { + cv_UMat_step1(self, i) + } + #[inline] + pub unsafe fn empty(&self) -> bool { + cv_UMat_empty(self) + } + #[inline] + pub unsafe fn total(&self) -> size_t { + cv_UMat_total(self) + } + #[inline] + pub unsafe fn checkVector( + &self, + 
elemChannels: ::std::os::raw::c_int, + depth: ::std::os::raw::c_int, + requireContinuous: bool, + ) -> ::std::os::raw::c_int { + cv_UMat_checkVector(self, elemChannels, depth, requireContinuous) + } + #[inline] + pub unsafe fn handle(&self, accessFlags: cv_AccessFlag) -> *mut ::std::os::raw::c_void { + cv_UMat_handle(self, accessFlags) + } + #[inline] + pub unsafe fn ndoffset(&self, ofs: *mut size_t) { + cv_UMat_ndoffset(self, ofs) + } + #[inline] + pub unsafe fn getStdAllocator() -> *mut cv_MatAllocator { + cv_UMat_getStdAllocator() + } + #[inline] + pub unsafe fn updateContinuityFlag(&mut self) { + cv_UMat_updateContinuityFlag(self) + } + #[inline] + pub unsafe fn new(usageFlags: cv_UMatUsageFlags) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat(__bindgen_tmp.as_mut_ptr(), usageFlags); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new1( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat1(__bindgen_tmp.as_mut_ptr(), rows, cols, type_, usageFlags); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new2( + size: cv_Size, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat2(__bindgen_tmp.as_mut_ptr(), size, type_, usageFlags); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new3( + rows: ::std::os::raw::c_int, + cols: ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + usageFlags: cv_UMatUsageFlags, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat3(__bindgen_tmp.as_mut_ptr(), rows, cols, type_, s, usageFlags); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new4( + size: cv_Size, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + usageFlags: cv_UMatUsageFlags, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat4(__bindgen_tmp.as_mut_ptr(), size, type_, s, usageFlags); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new5( + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + usageFlags: cv_UMatUsageFlags, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat5(__bindgen_tmp.as_mut_ptr(), ndims, sizes, type_, usageFlags); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new6( + ndims: ::std::os::raw::c_int, + sizes: *const ::std::os::raw::c_int, + type_: ::std::os::raw::c_int, + s: *const cv_Scalar, + usageFlags: cv_UMatUsageFlags, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat6( + __bindgen_tmp.as_mut_ptr(), + ndims, + sizes, + type_, + s, + usageFlags, + ); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new7(m: *const cv_UMat) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat7(__bindgen_tmp.as_mut_ptr(), m); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new8( + m: *const cv_UMat, + rowRange: *const cv_Range, + colRange: *const cv_Range, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat8(__bindgen_tmp.as_mut_ptr(), m, rowRange, colRange); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new9(m: *const cv_UMat, roi: *const cv_Rect) -> Self { + let mut 
__bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat9(__bindgen_tmp.as_mut_ptr(), m, roi); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new10(m: *const cv_UMat, ranges: *const cv_Range) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat10(__bindgen_tmp.as_mut_ptr(), m, ranges); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new11(m: *const cv_UMat, ranges: *const [u64; 3usize]) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat11(__bindgen_tmp.as_mut_ptr(), m, ranges); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new12(m: *mut cv_UMat) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_UMat_UMat12(__bindgen_tmp.as_mut_ptr(), m); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn destruct(&mut self) { + cv_UMat_UMat_destructor(self) + } +} +#[doc = " Matrix Expressions /////////////////////////////////"] +#[repr(C)] +#[repr(align(8))] +#[derive(Debug, Copy, Clone)] +pub struct cv_MatOp { + pub _bindgen_opaque_blob: u64, +} +#[test] +fn bindgen_test_layout_cv_MatOp() { + assert_eq!( + ::std::mem::size_of::<cv_MatOp>(), + 8usize, + concat!("Size of: ", stringify!(cv_MatOp)) + ); + assert_eq!( + ::std::mem::align_of::<cv_MatOp>(), + 8usize, + concat!("Alignment of ", stringify!(cv_MatOp)) + ); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv5MatOpC2Ev"] + pub fn cv_MatOp_MatOp(this: *mut cv_MatOp); +} +impl cv_MatOp { + #[inline] + pub unsafe fn new() -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_MatOp_MatOp(__bindgen_tmp.as_mut_ptr()); + __bindgen_tmp.assume_init() + } +} +extern "C" { + #[link_name = "\u{1}_ZN2cv5MatOpD1Ev"] + pub fn cv_MatOp_MatOp_destructor(this: *mut cv_MatOp); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp11elementWiseERKNS_7MatExprE"] + pub fn cv_MatOp_elementWise(this: *mut ::std::os::raw::c_void, expr: *const cv_MatExpr) + -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp3roiERKNS_7MatExprERKNS_5RangeES6_RS1_"] + pub fn cv_MatOp_roi( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + rowRange: *const cv_Range, + colRange: *const cv_Range, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp4diagERKNS_7MatExprEiRS1_"] + pub fn cv_MatOp_diag( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + d: ::std::os::raw::c_int, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp12augAssignAddERKNS_7MatExprERNS_3MatE"] + pub fn cv_MatOp_augAssignAdd( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + m: *mut cv_Mat, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp17augAssignSubtractERKNS_7MatExprERNS_3MatE"] + pub fn cv_MatOp_augAssignSubtract( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + m: *mut cv_Mat, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp17augAssignMultiplyERKNS_7MatExprERNS_3MatE"] + pub fn cv_MatOp_augAssignMultiply( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + m: *mut cv_Mat, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp15augAssignDivideERKNS_7MatExprERNS_3MatE"] + pub fn cv_MatOp_augAssignDivide( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + m: *mut cv_Mat, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp12augAssignAndERKNS_7MatExprERNS_3MatE"] + pub fn cv_MatOp_augAssignAnd( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + m: *mut cv_Mat, + ); +} +extern
"C" { + #[link_name = "\u{1}_ZNK2cv5MatOp11augAssignOrERKNS_7MatExprERNS_3MatE"] + pub fn cv_MatOp_augAssignOr( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + m: *mut cv_Mat, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp12augAssignXorERKNS_7MatExprERNS_3MatE"] + pub fn cv_MatOp_augAssignXor( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + m: *mut cv_Mat, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp3addERKNS_7MatExprES3_RS1_"] + pub fn cv_MatOp_add( + this: *mut ::std::os::raw::c_void, + expr1: *const cv_MatExpr, + expr2: *const cv_MatExpr, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp3addERKNS_7MatExprERKNS_7Scalar_IdEERS1_"] + pub fn cv_MatOp_add1( + this: *mut ::std::os::raw::c_void, + expr1: *const cv_MatExpr, + s: *const cv_Scalar, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp8subtractERKNS_7MatExprES3_RS1_"] + pub fn cv_MatOp_subtract( + this: *mut ::std::os::raw::c_void, + expr1: *const cv_MatExpr, + expr2: *const cv_MatExpr, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp8subtractERKNS_7Scalar_IdEERKNS_7MatExprERS5_"] + pub fn cv_MatOp_subtract1( + this: *mut ::std::os::raw::c_void, + s: *const cv_Scalar, + expr: *const cv_MatExpr, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp8multiplyERKNS_7MatExprES3_RS1_d"] + pub fn cv_MatOp_multiply( + this: *mut ::std::os::raw::c_void, + expr1: *const cv_MatExpr, + expr2: *const cv_MatExpr, + res: *mut cv_MatExpr, + scale: f64, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp8multiplyERKNS_7MatExprEdRS1_"] + pub fn cv_MatOp_multiply1( + this: *mut ::std::os::raw::c_void, + expr1: *const cv_MatExpr, + s: f64, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp6divideERKNS_7MatExprES3_RS1_d"] + pub fn cv_MatOp_divide( + this: *mut ::std::os::raw::c_void, + expr1: *const cv_MatExpr, + expr2: *const cv_MatExpr, + res: *mut cv_MatExpr, + scale: f64, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp6divideEdRKNS_7MatExprERS1_"] + pub fn cv_MatOp_divide1( + this: *mut ::std::os::raw::c_void, + s: f64, + expr: *const cv_MatExpr, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp3absERKNS_7MatExprERS1_"] + pub fn cv_MatOp_abs( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp9transposeERKNS_7MatExprERS1_"] + pub fn cv_MatOp_transpose( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp6matmulERKNS_7MatExprES3_RS1_"] + pub fn cv_MatOp_matmul( + this: *mut ::std::os::raw::c_void, + expr1: *const cv_MatExpr, + expr2: *const cv_MatExpr, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp6invertERKNS_7MatExprEiRS1_"] + pub fn cv_MatOp_invert( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + method: ::std::os::raw::c_int, + res: *mut cv_MatExpr, + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp4sizeERKNS_7MatExprE"] + pub fn cv_MatOp_size(this: *mut ::std::os::raw::c_void, expr: *const cv_MatExpr) -> cv_Size; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv5MatOp4typeERKNS_7MatExprE"] + pub fn cv_MatOp_type( + this: *mut ::std::os::raw::c_void, + expr: *const cv_MatExpr, + ) -> ::std::os::raw::c_int; +} +#[doc = " @brief Matrix expression 
representation"] +#[doc = "@anchor MatrixExpressions"] +#[doc = "This is a list of implemented matrix operations that can be combined in arbitrary complex"] +#[doc = "expressions (here A, B stand for matrices ( Mat ), s for a scalar ( Scalar ), alpha for a"] +#[doc = "real-valued scalar ( double )):"] +#[doc = "- Addition, subtraction, negation: `A+B`, `A-B`, `A+s`, `A-s`, `s+A`, `s-A`, `-A`"] +#[doc = "- Scaling: `A*alpha`"] +#[doc = "- Per-element multiplication and division: `A.mul(B)`, `A/B`, `alpha/A`"] +#[doc = "- Matrix multiplication: `A*B`"] +#[doc = "- Transposition: `A.t()` (means AT)"] +#[doc = "- Matrix inversion and pseudo-inversion, solving linear systems and least-squares problems:"] +#[doc = "`A.inv([method]) (~ A-1)`, `A.inv([method])*B (~ X: AX=B)`"] +#[doc = "- Comparison: `A cmpop B`, `A cmpop alpha`, `alpha cmpop A`, where *cmpop* is one of"] +#[doc = "`>`, `>=`, `==`, `!=`, `<=`, `<`. The result of comparison is an 8-bit single channel mask whose"] +#[doc = "elements are set to 255 (if the particular element or pair of elements satisfy the condition) or"] +#[doc = "0."] +#[doc = "- Bitwise logical operations: `A logicop B`, `A logicop s`, `s logicop A`, `~A`, where *logicop* is one of"] +#[doc = "`&`, `|`, `^`."] +#[doc = "- Element-wise minimum and maximum: `min(A, B)`, `min(A, alpha)`, `max(A, B)`, `max(A, alpha)`"] +#[doc = "- Element-wise absolute value: `abs(A)`"] +#[doc = "- Cross-product, dot-product: `A.cross(B)`, `A.dot(B)`"] +#[doc = "- Any function of matrix or matrices and scalars that returns a matrix or a scalar, such as norm,"] +#[doc = "mean, sum, countNonZero, trace, determinant, repeat, and others."] +#[doc = "- Matrix initializers ( Mat::eye(), Mat::zeros(), Mat::ones() ), matrix comma-separated"] +#[doc = "initializers, matrix constructors and operators that extract sub-matrices (see Mat description)."] +#[doc = "- Mat_() constructors to cast the result to the proper type."] +#[doc = "@note Comma-separated initializers and probably some other operations may require additional"] +#[doc = "explicit Mat() or Mat_() constructor calls to resolve a possible ambiguity."] +#[doc = ""] +#[doc = "Here are examples of matrix expressions:"] +#[doc = "@code"] +#[doc = "SVD svd(A);"] +#[doc = "Mat pinvA = svd.vt.t()*Mat::diag(1./svd.w)*svd.u.t();"] +#[doc = ""] +#[doc = "x -= (A.t()*A + lambda*Mat::eye(A.cols,A.cols,A.type())).inv(DECOMP_CHOLESKY)*(A.t()*err);"] +#[doc = ""] +#[doc = "Mat blurred; double sigma = 1, threshold = 5, amount = 1;"] +#[doc = "GaussianBlur(img, blurred, Size(), sigma, sigma);"] +#[doc = "Mat lowContrastMask = abs(img - blurred) < threshold;"] +#[doc = "Mat sharpened = img*(1+amount) + blurred*(-amount);"] +#[doc = "img.copyTo(sharpened, lowContrastMask);"] +#[doc = "@endcode"] +#[repr(C)] +#[repr(align(8))] +pub struct cv_MatExpr { + pub _bindgen_opaque_blob: [u64; 44usize], +} +#[test] +fn bindgen_test_layout_cv_MatExpr() { + assert_eq!( + ::std::mem::size_of::(), + 352usize, + concat!("Size of: ", stringify!(cv_MatExpr)) + ); + assert_eq!( + ::std::mem::align_of::(), + 8usize, + concat!("Alignment of ", stringify!(cv_MatExpr)) + ); +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr4sizeEv"] + pub fn cv_MatExpr_size(this: *const cv_MatExpr) -> cv_Size; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr4typeEv"] + pub fn cv_MatExpr_type(this: *const cv_MatExpr) -> ::std::os::raw::c_int; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr3rowEi"] + pub fn cv_MatExpr_row(this: *const cv_MatExpr, y: ::std::os::raw::c_int) -> 
cv_MatExpr; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr3colEi"] + pub fn cv_MatExpr_col(this: *const cv_MatExpr, x: ::std::os::raw::c_int) -> cv_MatExpr; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr4diagEi"] + pub fn cv_MatExpr_diag(this: *const cv_MatExpr, d: ::std::os::raw::c_int) -> cv_MatExpr; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr1tEv"] + pub fn cv_MatExpr_t(this: *const cv_MatExpr) -> cv_MatExpr; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr3invEi"] + pub fn cv_MatExpr_inv(this: *const cv_MatExpr, method: ::std::os::raw::c_int) -> cv_MatExpr; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr3mulERKS0_d"] + pub fn cv_MatExpr_mul(this: *const cv_MatExpr, e: *const cv_MatExpr, scale: f64) -> cv_MatExpr; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr3mulERKNS_3MatEd"] + pub fn cv_MatExpr_mul1(this: *const cv_MatExpr, m: *const cv_Mat, scale: f64) -> cv_MatExpr; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr5crossERKNS_3MatE"] + pub fn cv_MatExpr_cross(this: *const cv_MatExpr, m: *const cv_Mat) -> cv_Mat; +} +extern "C" { + #[link_name = "\u{1}_ZNK2cv7MatExpr3dotERKNS_3MatE"] + pub fn cv_MatExpr_dot(this: *const cv_MatExpr, m: *const cv_Mat) -> f64; +} +extern "C" { + #[link_name = "\u{1}_ZN2cv7MatExpr4swapERS0_"] + pub fn cv_MatExpr_swap(this: *mut cv_MatExpr, b: *mut cv_MatExpr); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv7MatExprC1Ev"] + pub fn cv_MatExpr_MatExpr(this: *mut cv_MatExpr); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv7MatExprC1ERKNS_3MatE"] + pub fn cv_MatExpr_MatExpr1(this: *mut cv_MatExpr, m: *const cv_Mat); +} +extern "C" { + #[link_name = "\u{1}_ZN2cv7MatExprC1EPKNS_5MatOpEiRKNS_3MatES6_S6_ddRKNS_7Scalar_IdEE"] + pub fn cv_MatExpr_MatExpr2( + this: *mut cv_MatExpr, + _op: *const cv_MatOp, + _flags: ::std::os::raw::c_int, + _a: *const cv_Mat, + _b: *const cv_Mat, + _c: *const cv_Mat, + _alpha: f64, + _beta: f64, + _s: *const cv_Scalar, + ); +} +impl cv_MatExpr { + #[inline] + pub unsafe fn size(&self) -> cv_Size { + cv_MatExpr_size(self) + } + #[inline] + pub unsafe fn type_(&self) -> ::std::os::raw::c_int { + cv_MatExpr_type(self) + } + #[inline] + pub unsafe fn row(&self, y: ::std::os::raw::c_int) -> cv_MatExpr { + cv_MatExpr_row(self, y) + } + #[inline] + pub unsafe fn col(&self, x: ::std::os::raw::c_int) -> cv_MatExpr { + cv_MatExpr_col(self, x) + } + #[inline] + pub unsafe fn diag(&self, d: ::std::os::raw::c_int) -> cv_MatExpr { + cv_MatExpr_diag(self, d) + } + #[inline] + pub unsafe fn t(&self) -> cv_MatExpr { + cv_MatExpr_t(self) + } + #[inline] + pub unsafe fn inv(&self, method: ::std::os::raw::c_int) -> cv_MatExpr { + cv_MatExpr_inv(self, method) + } + #[inline] + pub unsafe fn mul(&self, e: *const cv_MatExpr, scale: f64) -> cv_MatExpr { + cv_MatExpr_mul(self, e, scale) + } + #[inline] + pub unsafe fn mul1(&self, m: *const cv_Mat, scale: f64) -> cv_MatExpr { + cv_MatExpr_mul1(self, m, scale) + } + #[inline] + pub unsafe fn cross(&self, m: *const cv_Mat) -> cv_Mat { + cv_MatExpr_cross(self, m) + } + #[inline] + pub unsafe fn dot(&self, m: *const cv_Mat) -> f64 { + cv_MatExpr_dot(self, m) + } + #[inline] + pub unsafe fn swap(&mut self, b: *mut cv_MatExpr) { + cv_MatExpr_swap(self, b) + } + #[inline] + pub unsafe fn new() -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_MatExpr_MatExpr(__bindgen_tmp.as_mut_ptr()); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new1(m: *const cv_Mat) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + 
cv_MatExpr_MatExpr1(__bindgen_tmp.as_mut_ptr(), m); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn new2( + _op: *const cv_MatOp, + _flags: ::std::os::raw::c_int, + _a: *const cv_Mat, + _b: *const cv_Mat, + _c: *const cv_Mat, + _alpha: f64, + _beta: f64, + _s: *const cv_Scalar, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + cv_MatExpr_MatExpr2( + __bindgen_tmp.as_mut_ptr(), + _op, + _flags, + _a, + _b, + _c, + _alpha, + _beta, + _s, + ); + __bindgen_tmp.assume_init() + } +} +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct Landmark { + pub x: f32, + pub y: f32, + pub z: f32, + pub visibility: f32, + pub presence: f32, +} +#[test] +fn bindgen_test_layout_Landmark() { + assert_eq!( + ::std::mem::size_of::<Landmark>(), + 20usize, + concat!("Size of: ", stringify!(Landmark)) + ); + assert_eq!( + ::std::mem::align_of::<Landmark>(), + 4usize, + concat!("Alignment of ", stringify!(Landmark)) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<Landmark>())).x as *const _ as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(Landmark), + "::", + stringify!(x) + ) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<Landmark>())).y as *const _ as usize }, + 4usize, + concat!( + "Offset of field: ", + stringify!(Landmark), + "::", + stringify!(y) + ) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<Landmark>())).z as *const _ as usize }, + 8usize, + concat!( + "Offset of field: ", + stringify!(Landmark), + "::", + stringify!(z) + ) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<Landmark>())).visibility as *const _ as usize }, + 12usize, + concat!( + "Offset of field: ", + stringify!(Landmark), + "::", + stringify!(visibility) + ) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<Landmark>())).presence as *const _ as usize }, + 16usize, + concat!( + "Offset of field: ", + stringify!(Landmark), + "::", + stringify!(presence) + ) + ); +} +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct Pose { + pub data: [Landmark; 33usize], +} +#[test] +fn bindgen_test_layout_Pose() { + assert_eq!( + ::std::mem::size_of::<Pose>(), + 660usize, + concat!("Size of: ", stringify!(Pose)) + ); + assert_eq!( + ::std::mem::align_of::<Pose>(), + 4usize, + concat!("Alignment of ", stringify!(Pose)) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<Pose>())).data as *const _ as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(Pose), + "::", + stringify!(data) + ) + ); +} +#[repr(C)] +#[derive(Debug)] +pub struct PoseGraph { + pub poller: *mut ::std::os::raw::c_void, + pub graph: *mut ::std::os::raw::c_void, +} +#[test] +fn bindgen_test_layout_PoseGraph() { + assert_eq!( + ::std::mem::size_of::<PoseGraph>(), + 16usize, + concat!("Size of: ", stringify!(PoseGraph)) + ); + assert_eq!( + ::std::mem::align_of::<PoseGraph>(), + 8usize, + concat!("Alignment of ", stringify!(PoseGraph)) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<PoseGraph>())).poller as *const _ as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(PoseGraph), + "::", + stringify!(poller) + ) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<PoseGraph>())).graph as *const _ as usize }, + 8usize, + concat!( + "Offset of field: ", + stringify!(PoseGraph), + "::", + stringify!(graph) + ) + ); +} +extern "C" { + #[link_name = "\u{1}_ZN9PoseGraph7processEPKN2cv3MatER4Pose"] + pub fn PoseGraph_process(this: *mut PoseGraph, input: *const cv_Mat, output: *mut Pose) + -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZN9PoseGraphC1EPKcS1_"] + pub fn PoseGraph_PoseGraph( + this: *mut PoseGraph, + graph_config: *const ::std::os::raw::c_char, + output_node: *const
::std::os::raw::c_char, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN9PoseGraphD1Ev"] + pub fn PoseGraph_PoseGraph_destructor(this: *mut PoseGraph); +} +impl PoseGraph { + #[inline] + pub unsafe fn process(&mut self, input: *const cv_Mat, output: *mut Pose) -> bool { + PoseGraph_process(self, input, output) + } + #[inline] + pub unsafe fn new( + graph_config: *const ::std::os::raw::c_char, + output_node: *const ::std::os::raw::c_char, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + PoseGraph_PoseGraph(__bindgen_tmp.as_mut_ptr(), graph_config, output_node); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn destruct(&mut self) { + PoseGraph_PoseGraph_destructor(self) + } +} +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct Hand { + pub data: [Landmark; 21usize], +} +#[test] +fn bindgen_test_layout_Hand() { + assert_eq!( + ::std::mem::size_of::<Hand>(), + 420usize, + concat!("Size of: ", stringify!(Hand)) + ); + assert_eq!( + ::std::mem::align_of::<Hand>(), + 4usize, + concat!("Alignment of ", stringify!(Hand)) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<Hand>())).data as *const _ as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(Hand), + "::", + stringify!(data) + ) + ); +} +#[repr(C)] +#[derive(Debug)] +pub struct HandsGraph { + pub poller: *mut ::std::os::raw::c_void, + pub graph: *mut ::std::os::raw::c_void, +} +#[test] +fn bindgen_test_layout_HandsGraph() { + assert_eq!( + ::std::mem::size_of::<HandsGraph>(), + 16usize, + concat!("Size of: ", stringify!(HandsGraph)) + ); + assert_eq!( + ::std::mem::align_of::<HandsGraph>(), + 8usize, + concat!("Alignment of ", stringify!(HandsGraph)) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<HandsGraph>())).poller as *const _ as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(HandsGraph), + "::", + stringify!(poller) + ) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<HandsGraph>())).graph as *const _ as usize }, + 8usize, + concat!( + "Offset of field: ", + stringify!(HandsGraph), + "::", + stringify!(graph) + ) + ); +} +extern "C" { + #[link_name = "\u{1}_ZN10HandsGraph7processEPKN2cv3MatER4HandS5_"] + pub fn HandsGraph_process( + this: *mut HandsGraph, + input: *const cv_Mat, + left: *mut Hand, + right: *mut Hand, + ) -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZN10HandsGraphC1EPKcS1_"] + pub fn HandsGraph_HandsGraph( + this: *mut HandsGraph, + graph_config: *const ::std::os::raw::c_char, + output_node: *const ::std::os::raw::c_char, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN10HandsGraphD1Ev"] + pub fn HandsGraph_HandsGraph_destructor(this: *mut HandsGraph); +} +impl HandsGraph { + #[inline] + pub unsafe fn process( + &mut self, + input: *const cv_Mat, + left: *mut Hand, + right: *mut Hand, + ) -> bool { + HandsGraph_process(self, input, left, right) + } + #[inline] + pub unsafe fn new( + graph_config: *const ::std::os::raw::c_char, + output_node: *const ::std::os::raw::c_char, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + HandsGraph_HandsGraph(__bindgen_tmp.as_mut_ptr(), graph_config, output_node); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn destruct(&mut self) { + HandsGraph_HandsGraph_destructor(self) + } +} +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct FaceMesh { + pub data: [Landmark; 478usize], +} +#[test] +fn bindgen_test_layout_FaceMesh() { + assert_eq!( + ::std::mem::size_of::<FaceMesh>(), + 9560usize, + concat!("Size of: ", stringify!(FaceMesh)) + ); + assert_eq!( + ::std::mem::align_of::<FaceMesh>(), + 4usize, + concat!("Alignment of ",
stringify!(FaceMesh)) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<FaceMesh>())).data as *const _ as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(FaceMesh), + "::", + stringify!(data) + ) + ); +} +#[repr(C)] +#[derive(Debug)] +pub struct FaceMeshGraph { + pub poller: *mut ::std::os::raw::c_void, + pub graph: *mut ::std::os::raw::c_void, +} +#[test] +fn bindgen_test_layout_FaceMeshGraph() { + assert_eq!( + ::std::mem::size_of::<FaceMeshGraph>(), + 16usize, + concat!("Size of: ", stringify!(FaceMeshGraph)) + ); + assert_eq!( + ::std::mem::align_of::<FaceMeshGraph>(), + 8usize, + concat!("Alignment of ", stringify!(FaceMeshGraph)) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<FaceMeshGraph>())).poller as *const _ as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(FaceMeshGraph), + "::", + stringify!(poller) + ) + ); + assert_eq!( + unsafe { &(*(::std::ptr::null::<FaceMeshGraph>())).graph as *const _ as usize }, + 8usize, + concat!( + "Offset of field: ", + stringify!(FaceMeshGraph), + "::", + stringify!(graph) + ) + ); +} +extern "C" { + #[link_name = "\u{1}_ZN13FaceMeshGraph7processEPKN2cv3MatER8FaceMesh"] + pub fn FaceMeshGraph_process( + this: *mut FaceMeshGraph, + input: *const cv_Mat, + mesh: *mut FaceMesh, + ) -> bool; +} +extern "C" { + #[link_name = "\u{1}_ZN13FaceMeshGraphC1EPKcS1_"] + pub fn FaceMeshGraph_FaceMeshGraph( + this: *mut FaceMeshGraph, + graph_config: *const ::std::os::raw::c_char, + output_node: *const ::std::os::raw::c_char, + ); +} +extern "C" { + #[link_name = "\u{1}_ZN13FaceMeshGraphD1Ev"] + pub fn FaceMeshGraph_FaceMeshGraph_destructor(this: *mut FaceMeshGraph); +} +impl FaceMeshGraph { + #[inline] + pub unsafe fn process(&mut self, input: *const cv_Mat, mesh: *mut FaceMesh) -> bool { + FaceMeshGraph_process(self, input, mesh) + } + #[inline] + pub unsafe fn new( + graph_config: *const ::std::os::raw::c_char, + output_node: *const ::std::os::raw::c_char, + ) -> Self { + let mut __bindgen_tmp = ::std::mem::MaybeUninit::uninit(); + FaceMeshGraph_FaceMeshGraph(__bindgen_tmp.as_mut_ptr(), graph_config, output_node); + __bindgen_tmp.assume_init() + } + #[inline] + pub unsafe fn destruct(&mut self) { + FaceMeshGraph_FaceMeshGraph_destructor(self) + } +} diff --git a/src/face_mesh_desktop_live.txt b/src/face_mesh_desktop_live.txt new file mode 100644 index 0000000..2cc5634 --- /dev/null +++ b/src/face_mesh_desktop_live.txt @@ -0,0 +1,66 @@ +# MediaPipe graph that performs face mesh with TensorFlow Lite on CPU. + +# Input image. (ImageFrame) +input_stream: "input_video" + +# Output image with rendered results. (ImageFrame) +output_stream: "output_video" +# Collection of detected/processed faces, each represented as a list of +# landmarks. (std::vector<NormalizedLandmarkList>) +output_stream: "multi_face_landmarks" + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for downstream nodes +# (calculators and subgraphs) in the graph to finish their tasks before it +# passes through another image. All images that come in while waiting are +# dropped, limiting the number of in-flight images in most part of the graph to +# 1. This prevents the downstream nodes from queuing up incoming images and data +# excessively, which leads to increased latency and memory usage, unwanted in +# real-time mobile applications. It also eliminates unnecessary computation, +# e.g., the output produced by a node may get dropped downstream if the +# subsequent nodes are still busy processing previous inputs.
+node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Defines side packets for further use in the graph. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:0:num_faces" + output_side_packet: "PACKET:1:with_attention" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { int_value: 1 } + packet { bool_value: true } + } + } +} + +# Subgraph that detects faces and corresponding landmarks. +node { + calculator: "FaceLandmarkFrontCpu" + input_stream: "IMAGE:throttled_input_video" + input_side_packet: "NUM_FACES:num_faces" + input_side_packet: "WITH_ATTENTION:with_attention" + output_stream: "LANDMARKS:multi_face_landmarks" + output_stream: "ROIS_FROM_LANDMARKS:face_rects_from_landmarks" + output_stream: "DETECTIONS:face_detections" + output_stream: "ROIS_FROM_DETECTIONS:face_rects_from_detections" +} + +# Subgraph that renders face-landmark annotation onto the input image. +node { + calculator: "FaceRendererCpu" + input_stream: "IMAGE:throttled_input_video" + input_stream: "LANDMARKS:multi_face_landmarks" + input_stream: "NORM_RECTS:face_rects_from_landmarks" + input_stream: "DETECTIONS:face_detections" + output_stream: "IMAGE:output_video" +} diff --git a/src/hand_tracking_desktop_live.txt b/src/hand_tracking_desktop_live.txt new file mode 100644 index 0000000..4f34c55 --- /dev/null +++ b/src/hand_tracking_desktop_live.txt @@ -0,0 +1,50 @@ +# MediaPipe graph that performs hands tracking on desktop with TensorFlow +# Lite on CPU. +# Used in the example in +# mediapipe/examples/desktop/hand_tracking:hand_tracking_cpu. + +# CPU image. (ImageFrame) +input_stream: "input_video" + +# CPU image. (ImageFrame) +output_stream: "output_video" + +# Collection of detected/predicted hands, each represented as a list of +# landmarks. (std::vector<NormalizedLandmarkList>) +output_stream: "hand_landmarks" + +# Generates side packet containing max number of hands to detect/track. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:num_hands" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { int_value: 2 } + } + } +} + +# Detects/tracks hand landmarks. +node { + calculator: "HandLandmarkTrackingCpu" + input_stream: "IMAGE:input_video" + input_side_packet: "NUM_HANDS:num_hands" + output_stream: "LANDMARKS:hand_landmarks" + output_stream: "HANDEDNESS:handedness" + output_stream: "PALM_DETECTIONS:multi_palm_detections" + output_stream: "HAND_ROIS_FROM_LANDMARKS:multi_hand_rects" + output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:multi_palm_rects" +} + +# Subgraph that renders annotations and overlays them on top of the input +# images (see hand_renderer_cpu.pbtxt).
+node { + calculator: "HandRendererSubgraph" + input_stream: "IMAGE:input_video" + input_stream: "DETECTIONS:multi_palm_detections" + input_stream: "LANDMARKS:hand_landmarks" + input_stream: "HANDEDNESS:handedness" + input_stream: "NORM_RECTS:0:multi_palm_rects" + input_stream: "NORM_RECTS:1:multi_hand_rects" + output_stream: "IMAGE:output_video" +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..37c26c2 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,394 @@ +#![allow(dead_code)] +#![allow(non_camel_case_types)] +#![allow(non_upper_case_globals)] +#![allow(non_snake_case)] +#![allow(improper_ctypes)] +#![allow(deref_nullptr)] + +// ------------------------------------------------------------------------------------------------- +// LINKING +// ------------------------------------------------------------------------------------------------- + +#[link(name = "mediagraph")] +extern "C" {} + +use opencv::prelude::*; +use std::ffi::CString; + +mod bindings; + +pub use bindings::*; + +impl Default for Landmark { + fn default() -> Self { + Self { + x: 0.0, + y: 0.0, + z: 0.0, + visibility: 0.0, + presence: 0.0, + } + } +} + +impl Default for Pose { + fn default() -> Self { + Self { + data: [Landmark::default(); 33], + } + } +} + +impl Default for Hand { + fn default() -> Self { + Self { + data: [Landmark::default(); 21], + } + } +} + +impl Default for FaceMesh { + fn default() -> Self { + Self { + data: [Landmark::default(); 478], + } + } +} + +pub mod pose { + use super::*; + + pub enum PoseLandmark { + NOSE = 0, + LEFT_EYE_INNER = 1, + LEFT_EYE = 2, + LEFT_EYE_OUTER = 3, + RIGHT_EYE_INNER = 4, + RIGHT_EYE = 5, + RIGHT_EYE_OUTER = 6, + LEFT_EAR = 7, + RIGHT_EAR = 8, + MOUTH_LEFT = 9, + MOUTH_RIGHT = 10, + LEFT_SHOULDER = 11, + RIGHT_SHOULDER = 12, + LEFT_ELBOW = 13, + RIGHT_ELBOW = 14, + LEFT_WRIST = 15, + RIGHT_WRIST = 16, + LEFT_PINKY = 17, + RIGHT_PINKY = 18, + LEFT_INDEX = 19, + RIGHT_INDEX = 20, + LEFT_THUMB = 21, + RIGHT_THUMB = 22, + LEFT_HIP = 23, + RIGHT_HIP = 24, + LEFT_KNEE = 25, + RIGHT_KNEE = 26, + LEFT_ANKLE = 27, + RIGHT_ANKLE = 28, + LEFT_HEEL = 29, + RIGHT_HEEL = 30, + LEFT_FOOT_INDEX = 31, + RIGHT_FOOT_INDEX = 32, + } + + pub struct PoseDetector { + pub mode: bool, // false + pub smooth: bool, // true, + pub detection_con: f32, // 0.5 + pub track_con: f32, // 0.5 + pub graph: PoseGraph, + } + + impl PoseDetector { + pub fn new(mode: bool, smooth: bool, detection_con: f32, track_con: f32) -> Self { + let graph_config = + CString::new(include_str!("pose_tracking_cpu.txt")).expect("CString::new failed"); + let output_node = CString::new("pose_landmarks").expect("CString::new failed"); + + let graph: PoseGraph = + unsafe { PoseGraph::new(graph_config.as_ptr(), output_node.as_ptr()) }; + + Self { + mode, + smooth, + detection_con, + track_con, + graph, + } + } + + pub fn process(&mut self, input: &Mat, pose: *mut Pose) -> bool { + unsafe { + let frame = input.as_raw() as *const cv_Mat; + self.graph.process(frame, pose) + } + } + + // // draw true + // pub fn find_pose(&self, img: &[u8], draw: bool) {} + + // // draw: true, bbox_with_hands: false + // pub fn find_position(&self, img: &[u8], draw: bool, bbox_with_hands: bool) {} + + // // draw: true + // pub fn find_angle( + // &self, + // img: &[u8], + // p1: cgmath::Point2, + // p2: cgmath::Point2, + // draw: bool, + // ) { + // } + + // pub fn find_distance( + // &self, + // p1: cgmath::Point2, + // p2: cgmath::Point2, + // img: Option<&[u8]>, + // r: f32, + // t: f32, + // ) { + // } + + // pub fn 
angle_check(&self, my_angle: f32, target_angle: f32, add_on: f32) {} + } + + impl Default for PoseDetector { + fn default() -> Self { + Self::new(false, true, 0.5, 0.5) + } + } +} + +pub mod face_mesh { + use super::*; + + pub struct FaceMeshDetector { + pub static_mode: bool, // false + pub max_faces: usize, // 2 + pub min_detection_con: f32, // 0.5 + pub min_track_con: f32, // 0.5 + pub graph: FaceMeshGraph, + } + + impl FaceMeshDetector { + pub fn new( + static_mode: bool, + max_faces: usize, + min_detection_con: f32, + min_track_con: f32, + ) -> Self { + let graph_config = CString::new(include_str!("face_mesh_desktop_live.txt")) + .expect("CString::new failed"); + let output_node = CString::new("multi_face_landmarks").expect("CString::new failed"); + + let graph: FaceMeshGraph = + unsafe { FaceMeshGraph::new(graph_config.as_ptr(), output_node.as_ptr()) }; + Self { + static_mode, + max_faces, + min_detection_con, + min_track_con, + graph, + } + } + + pub fn process(&mut self, input: &Mat, mesh: *mut FaceMesh) -> bool { + unsafe { + let frame = input.as_raw() as *const cv_Mat; + self.graph.process(frame, mesh) + } + } + // // draw: true + // pub fn find_face_mesh(&self, img: &[u8], draw: bool) {} + + // pub fn find_distance( + // &self, + // p1: cgmath::Point2, + // p2: cgmath::Point2, + // img: Option<&[u8]>, + // ) { + // } + } + + impl Default for FaceMeshDetector { + fn default() -> Self { + Self::new(false, 2, 0.5, 0.5) + } + } +} + +// pub mod face_detection { +// pub enum FaceKeyPoint { +// RIGHT_EYE = 0, +// LEFT_EYE = 1, +// NOSE_TIP = 2, +// MOUTH_CENTER = 3, +// RIGHT_EAR_TRAGION = 4, +// LEFT_EAR_TRAGION = 5, +// } +// pub struct FaceDetection {} + +// impl FaceDetection { +// pub fn process(&self /* image */) /*NamedTuple*/ {} +// } + +// pub struct FaceDetector { +// pub min_detection_con: f32, // 0.5 +// pub face_detection: FaceDetection, +// } + +// impl FaceDetector { +// pub fn new(min_detection_con: f32) -> Self { +// Self { +// min_detection_con, +// face_detection: todo!(), +// } +// } + +// // draw: true +// pub fn find_faces(&self, img: &[u8], draw: bool) {} +// } + +// impl Default for FaceDetector { +// fn default() -> Self { +// Self::new(0.5) +// } +// } +// } + +pub mod hands { + use super::*; + // use mediapipe::*; + // use std::collections::HashMap; + + pub enum HandLandmark { + WRIST = 0, + THUMB_CMC = 1, + THUMB_MCP = 2, + THUMB_IP = 3, + THUMB_TIP = 4, + INDEX_FINGER_MCP = 5, + INDEX_FINGER_PIP = 6, + INDEX_FINGER_DIP = 7, + INDEX_FINGER_TIP = 8, + MIDDLE_FINGER_MCP = 9, + MIDDLE_FINGER_PIP = 10, + MIDDLE_FINGER_DIP = 11, + MIDDLE_FINGER_TIP = 12, + RING_FINGER_MCP = 13, + RING_FINGER_PIP = 14, + RING_FINGER_DIP = 15, + RING_FINGER_TIP = 16, + PINKY_MCP = 17, + PINKY_PIP = 18, + PINKY_DIP = 19, + PINKY_TIP = 20, + } + + pub struct HandDetector { + pub mode: bool, + pub max_hands: usize, + pub detection_con: f32, // 0.5 + pub min_track_con: f32, // 0.5 + pub graph: HandsGraph, + } + + impl HandDetector { + pub fn new(mode: bool, max_hands: usize, detection_con: f32, min_track_con: f32) -> Self { + // // ::std::vector<::mediapipe::NormalizedLandmarkList> + // let graph_config = CString::new(include_str!("face_mesh_desktop_live.txt")).expect("CString::new failed"); + // let output_node = CString::new("multi_face_landmarks").expect("CString::new failed"); + + let graph_config = CString::new(include_str!("hand_tracking_desktop_live.txt")) + .expect("CString::new failed"); + let output_node = CString::new("hand_landmarks").expect("CString::new failed"); + + let
graph: HandsGraph = + unsafe { HandsGraph::new(graph_config.as_ptr(), output_node.as_ptr()) }; + + Self { + mode, + max_hands, + detection_con, + min_track_con, + graph, + } + } + + pub fn process(&mut self, input: &Mat, left: *mut Hand, right: *mut Hand) -> bool { + unsafe { + let frame = input.as_raw() as *const cv_Mat; + self.graph.process(frame, left, right) + } + } + // // draw: true, flip_type: true + // pub fn find_hands(&self, img: &[u8], draw: bool, flip_type: bool) {} + + // pub fn fingers_up(&self, my_hand: &HashMap) /*List of which fingers are up*/ + // { + // } + + // pub fn find_distance( + // &self, + // p1: cgmath::Point2, + // p2: cgmath::Point2, + // img: Option<&[u8]>, + // ) { + // } + } + + impl Default for HandDetector { + fn default() -> Self { + Self::new(false, 2, 0.5, 0.5) + } + } +} + +// pub mod objectron { +// pub struct Objectron {} + +// impl Objectron { +// pub fn process(&self /* image */) /*NamedTuple*/ {} +// } +// } + +// pub mod selfie_segmentation { +// pub struct SelfieSegmentation {} + +// impl SelfieSegmentation { +// pub fn process(&self /* image */) /*NamedTuple*/ {} +// } + +// pub struct SelfieSegmentationDetector { +// pub model: usize, // 0 is general 1 is landscape(faster) +// pub selfie_segmentation: SelfieSegmentation, +// } + +// impl SelfieSegmentationDetector { +// pub fn new(model: usize) -> Self { +// todo!() +// } + +// // threshold: 0.1 +// pub fn remove_bg(&self, img: &[u8], img_bg: [u8; 3], threshold: f32) {} +// } + +// impl Default for SelfieSegmentationDetector { +// fn default() -> Self { +// Self::new(1) +// } +// } +// } + +#[cfg(test)] +mod tests { + #[test] + fn it_works() { + let result = 2 + 2; + assert_eq!(result, 4); + } +} diff --git a/src/pose_tracking_cpu.txt b/src/pose_tracking_cpu.txt new file mode 100644 index 0000000..31d847e --- /dev/null +++ b/src/pose_tracking_cpu.txt @@ -0,0 +1,63 @@ +# MediaPipe graph that performs pose tracking with TensorFlow Lite on CPU. + +# CPU buffer. (ImageFrame) +input_stream: "input_video" + +# Output image with rendered results. (ImageFrame) +output_stream: "output_video" +# Pose landmarks. (NormalizedLandmarkList) +output_stream: "pose_landmarks" + +# Generates side packet to enable segmentation. +node { + calculator: "ConstantSidePacketCalculator" + output_side_packet: "PACKET:enable_segmentation" + node_options: { + [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: { + packet { bool_value: true } + } + } +} + +# Throttles the images flowing downstream for flow control. It passes through +# the very first incoming image unaltered, and waits for downstream nodes +# (calculators and subgraphs) in the graph to finish their tasks before it +# passes through another image. All images that come in while waiting are +# dropped, limiting the number of in-flight images in most part of the graph to +# 1. This prevents the downstream nodes from queuing up incoming images and data +# excessively, which leads to increased latency and memory usage, unwanted in +# real-time mobile applications. It also eliminates unnecessary computation, +# e.g., the output produced by a node may get dropped downstream if the +# subsequent nodes are still busy processing previous inputs. +node { + calculator: "FlowLimiterCalculator" + input_stream: "input_video" + input_stream: "FINISHED:output_video" + input_stream_info: { + tag_index: "FINISHED" + back_edge: true + } + output_stream: "throttled_input_video" +} + +# Subgraph that detects poses and corresponding landmarks.
+node { + calculator: "PoseLandmarkCpu" + input_side_packet: "ENABLE_SEGMENTATION:enable_segmentation" + input_stream: "IMAGE:throttled_input_video" + output_stream: "LANDMARKS:pose_landmarks" + output_stream: "SEGMENTATION_MASK:segmentation_mask" + output_stream: "DETECTION:pose_detection" + output_stream: "ROI_FROM_LANDMARKS:roi_from_landmarks" +} + +# Subgraph that renders pose-landmark annotation onto the input image. +node { + calculator: "PoseRendererCpu" + input_stream: "IMAGE:throttled_input_video" + input_stream: "LANDMARKS:pose_landmarks" + input_stream: "SEGMENTATION_MASK:segmentation_mask" + input_stream: "DETECTION:pose_detection" + input_stream: "ROI:roi_from_landmarks" + output_stream: "IMAGE:output_video" +}
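For reference, a minimal capture loop that drives the new pose module end to end. This is only a sketch, not part of the diff: it assumes the mediagraph library has been built and linked as the README describes, that a webcam is available at index 0, and that the CPU graph expects RGB frames (OpenCV captures BGR, hence the conversion); the file name examples/pose.rs is hypothetical.

// examples/pose.rs (hypothetical) — exercising the pose module above.
use mediapipe::*;
use opencv::prelude::*;
use opencv::{imgproc, videoio, Result};

fn main() -> Result<()> {
    let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
    if !cap.is_opened()? {
        panic!("Unable to open default cam")
    }

    // Default detector: loads pose_tracking_cpu.txt and polls the
    // "pose_landmarks" output stream through the mediagraph FFI.
    let mut detector = pose::PoseDetector::default();
    let mut detected = Pose::default();

    let mut raw_frame = Mat::default();
    let mut rgb_frame = Mat::default();
    loop {
        cap.read(&mut raw_frame)?;
        if raw_frame.size()?.width == 0 {
            continue; // skip empty frames
        }
        // The graph consumes RGB; OpenCV delivers BGR.
        imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
        if detector.process(&rgb_frame, &mut detected) {
            let nose = detected.data[pose::PoseLandmark::NOSE as usize];
            println!("NOSE: {} {} {}", nose.x, nose.y, nose.z);
        }
    }
}

As with the hands and face_mesh wrappers, the unsafe FFI calls stay inside PoseDetector::process, so the capture loop itself needs no unsafe block.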